hbase git commit: HBASE-15644 Use org.scala-tools:maven-scala-plugin for scaladocs.

2016-04-18 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master e9211e415 -> a56b374c5


HBASE-15644 Use org.scala-tools:maven-scala-plugin for scaladocs.

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a56b374c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a56b374c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a56b374c

Branch: refs/heads/master
Commit: a56b374c5278b6d912143f954a3a424a9488bd5b
Parents: e9211e4
Author: Apekshit 
Authored: Wed Apr 13 14:54:56 2016 -0700
Committer: Sean Busbey 
Committed: Mon Apr 18 01:53:43 2016 -0500

--
 pom.xml | 14 ++
 1 file changed, 14 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a56b374c/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 0324c1c..8bd3706 100644
--- a/pom.xml
+++ b/pom.xml
@@ -76,6 +76,11 @@
     e.g. surefire 2.18-SNAPSHOT-->
   <repositories>
     <repository>
+      <id>scala-tools.org</id>
+      <name>Scala-tools Maven2 Repository</name>
+      <url>http://scala-tools.org/repo-releases</url>
+    </repository>
+    <repository>
       <id>apache.snapshots</id>
       <url>http://repository.apache.org/snapshots/</url>
     </repository>
@@ -1166,6 +1171,11 @@
 true
 true
   
+        <plugin>
+          <groupId>org.scala-tools</groupId>
+          <artifactId>maven-scala-plugin</artifactId>
+          <version>2.15.2</version>
+        </plugin>
 
   
   
@@ -3067,6 +3077,10 @@
 
   
 
+      <plugin>
+        <groupId>org.scala-tools</groupId>
+        <artifactId>maven-scala-plugin</artifactId>
+      </plugin>
 
   
   



hbase git commit: HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy

2016-04-18 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master a56b374c5 -> f2e0aca2b


HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f2e0aca2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f2e0aca2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f2e0aca2

Branch: refs/heads/master
Commit: f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd
Parents: a56b374
Author: zhangduo 
Authored: Mon Apr 18 11:46:34 2016 +0800
Committer: zhangduo 
Committed: Mon Apr 18 20:10:31 2016 +0800

--
 .../regionserver/compactions/DateTieredCompactionPolicy.java| 5 ++---
 .../regionserver/compactions/RatioBasedCompactionPolicy.java| 2 +-
 2 files changed, 3 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f2e0aca2/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
index d61af42..6527c9b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
@@ -36,7 +36,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
@@ -118,8 +117,8 @@ public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
 
     for (StoreFile file: filesToCompact) {
       Long minTimestamp = file.getMinimumTimestamp();
-      long oldest = (minTimestamp == null) ? (Long)Long.MIN_VALUE : now - minTimestamp.longValue();
-      if (cfTTL != HConstants.FOREVER && oldest >= cfTTL) {
+      long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
+      if (cfTTL != Long.MAX_VALUE && oldest >= cfTTL) {
         LOG.debug("Major compaction triggered on store " + this
           + "; for TTL maintenance");
         return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2e0aca2/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index 5600a4e..3386bfd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -70,7 +70,7 @@ public class RatioBasedCompactionPolicy extends SortedCompactionPolicy {
     StoreFile sf = filesToCompact.iterator().next();
     Long minTimestamp = sf.getMinimumTimestamp();
     long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
-    if (sf.isMajorCompaction() && (cfTTL == HConstants.FOREVER || oldest < cfTTL)) {
+    if (sf.isMajorCompaction() && (cfTTL == Long.MAX_VALUE || oldest < cfTTL)) {
       float blockLocalityIndex =
         sf.getHDFSBlockDistribution().getBlockLocalityIndex(
           RSRpcServices.getHostname(comConf.conf, false));
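
For context on the fix: HConstants.FOREVER is declared as an int (Integer.MAX_VALUE), while the store layer normalizes an unset column-family TTL to Long.MAX_VALUE before it reaches the compaction policy, so the long TTL can never equal the int sentinel. A minimal sketch of the mismatch, assuming those sentinel values; the class below is illustrative, not HBase code:

public class TtlSentinelSketch {
  // Stand-in for HConstants.FOREVER, which is declared as an int.
  static final int FOREVER = Integer.MAX_VALUE;

  public static void main(String[] args) {
    // The store layer hands the compaction policy Long.MAX_VALUE
    // when no TTL is configured on the column family.
    long cfTTL = Long.MAX_VALUE;

    // Old guard: the int sentinel widens to 2^31 - 1, so an unset TTL
    // (2^63 - 1) still looks "set" and can trigger TTL major compactions.
    System.out.println(cfTTL != FOREVER);        // prints true
    // New guard: compares against the long sentinel actually stored.
    System.out.println(cfTTL != Long.MAX_VALUE); // prints false
  }
}

The same reasoning applies to the cfTtl checks in the branch backports below.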



hbase git commit: HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy

2016-04-18 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1 7dacf9f10 -> ee78b6da7


HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ee78b6da
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ee78b6da
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ee78b6da

Branch: refs/heads/branch-1
Commit: ee78b6da7aade033e529d6e6e3a8214eb01d791f
Parents: 7dacf9f
Author: zhangduo 
Authored: Mon Apr 18 11:46:34 2016 +0800
Committer: zhangduo 
Committed: Mon Apr 18 20:11:35 2016 +0800

--
 .../regionserver/compactions/DateTieredCompactionPolicy.java| 5 ++---
 .../regionserver/compactions/RatioBasedCompactionPolicy.java| 2 +-
 2 files changed, 3 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ee78b6da/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
index d61af42..6527c9b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
@@ -36,7 +36,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
@@ -118,8 +117,8 @@ public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
 
     for (StoreFile file: filesToCompact) {
       Long minTimestamp = file.getMinimumTimestamp();
-      long oldest = (minTimestamp == null) ? (Long)Long.MIN_VALUE : now - minTimestamp.longValue();
-      if (cfTTL != HConstants.FOREVER && oldest >= cfTTL) {
+      long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
+      if (cfTTL != Long.MAX_VALUE && oldest >= cfTTL) {
         LOG.debug("Major compaction triggered on store " + this
           + "; for TTL maintenance");
         return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/ee78b6da/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index c95911a..7b812cd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -70,7 +70,7 @@ public class RatioBasedCompactionPolicy extends SortedCompactionPolicy {
     StoreFile sf = filesToCompact.iterator().next();
     Long minTimestamp = sf.getMinimumTimestamp();
     long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
-    if (sf.isMajorCompaction() && (cfTTL == HConstants.FOREVER || oldest < cfTTL)) {
+    if (sf.isMajorCompaction() && (cfTTL == Long.MAX_VALUE || oldest < cfTTL)) {
       float blockLocalityIndex =
         sf.getHDFSBlockDistribution().getBlockLocalityIndex(
           RSRpcServices.getHostname(comConf.conf, false));



hbase git commit: HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy

2016-04-18 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 265a4d695 -> 87d8c8863


HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/87d8c886
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/87d8c886
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/87d8c886

Branch: refs/heads/branch-1.3
Commit: 87d8c8863b37d6c87115535cbe8dd72038f7bb59
Parents: 265a4d6
Author: zhangduo 
Authored: Mon Apr 18 11:46:34 2016 +0800
Committer: zhangduo 
Committed: Mon Apr 18 20:13:00 2016 +0800

--
 .../regionserver/compactions/DateTieredCompactionPolicy.java| 5 ++---
 .../regionserver/compactions/RatioBasedCompactionPolicy.java| 2 +-
 2 files changed, 3 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/87d8c886/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
index d61af42..6527c9b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
@@ -36,7 +36,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
@@ -118,8 +117,8 @@ public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
 
     for (StoreFile file: filesToCompact) {
       Long minTimestamp = file.getMinimumTimestamp();
-      long oldest = (minTimestamp == null) ? (Long)Long.MIN_VALUE : now - minTimestamp.longValue();
-      if (cfTTL != HConstants.FOREVER && oldest >= cfTTL) {
+      long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
+      if (cfTTL != Long.MAX_VALUE && oldest >= cfTTL) {
         LOG.debug("Major compaction triggered on store " + this
           + "; for TTL maintenance");
         return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/87d8c886/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index c95911a..7b812cd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -70,7 +70,7 @@ public class RatioBasedCompactionPolicy extends SortedCompactionPolicy {
     StoreFile sf = filesToCompact.iterator().next();
     Long minTimestamp = sf.getMinimumTimestamp();
     long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
-    if (sf.isMajorCompaction() && (cfTTL == HConstants.FOREVER || oldest < cfTTL)) {
+    if (sf.isMajorCompaction() && (cfTTL == Long.MAX_VALUE || oldest < cfTTL)) {
       float blockLocalityIndex =
         sf.getHDFSBlockDistribution().getBlockLocalityIndex(
           RSRpcServices.getHostname(comConf.conf, false));



hbase git commit: HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy

2016-04-18 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/0.98 a3846b132 -> 7a0216b7a


HBASE-15664 Use Long.MAX_VALUE instead of HConstants.FOREVER in CompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7a0216b7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7a0216b7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7a0216b7

Branch: refs/heads/0.98
Commit: 7a0216b7a48c8afd0c31d001a0f1c8cede030a93
Parents: a3846b1
Author: zhangduo 
Authored: Mon Apr 18 11:51:10 2016 +0800
Committer: zhangduo 
Committed: Mon Apr 18 20:18:49 2016 +0800

--
 .../regionserver/compactions/DateTieredCompactionPolicy.java| 5 ++---
 .../regionserver/compactions/RatioBasedCompactionPolicy.java| 2 +-
 2 files changed, 3 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7a0216b7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
index d32afe1..aefcd8f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
@@ -35,7 +35,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
@@ -117,7 +116,7 @@ public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
     for (StoreFile file: filesToCompact) {
       Long minTimestamp = file.getMinimumTimestamp();
       long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
-      if (cfTtl != HConstants.FOREVER && oldest >= cfTtl) {
+      if (cfTtl != Long.MAX_VALUE && oldest >= cfTtl) {
         LOG.debug("Major compaction triggered on store " + this
           + "; for TTL maintenance");
         return true;
@@ -426,4 +425,4 @@ public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
       return "[" + startMillis() + ", " + endMillis() + ")";
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/7a0216b7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index 1685aa2..86e9088 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -68,7 +68,7 @@ public class RatioBasedCompactionPolicy extends SortedCompactionPolicy {
     StoreFile sf = filesToCompact.iterator().next();
     Long minTimestamp = sf.getMinimumTimestamp();
     long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
-    if (sf.isMajorCompaction() && (cfTtl == HConstants.FOREVER || oldest < cfTtl)) {
+    if (sf.isMajorCompaction() && (cfTtl == Long.MAX_VALUE || oldest < cfTtl)) {
       float blockLocalityIndex =
         sf.getHDFSBlockDistribution().getBlockLocalityIndex(
           HRegionServer.getHostname(comConf.conf));



hbase git commit: HBASE-15668 HFileReplicator fails to replicate other hfiles in the request when a hfile is not found in FS anywhere

2016-04-18 Thread ashishsinghi
Repository: hbase
Updated Branches:
  refs/heads/master f2e0aca2b -> 70687c18b


HBASE-15668 HFileReplicator fails to replicate other hfiles in the request when a hfile is not found in FS anywhere


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70687c18
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70687c18
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70687c18

Branch: refs/heads/master
Commit: 70687c18bbebf86235091a2b0cbf89600e52ec63
Parents: f2e0aca
Author: Ashish Singhi 
Authored: Mon Apr 18 22:17:02 2016 +0530
Committer: Ashish Singhi 
Committed: Mon Apr 18 22:17:02 2016 +0530

--
 .../hadoop/hbase/replication/regionserver/HFileReplicator.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/70687c18/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
index 17f6780..1a1044d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
@@ -378,11 +378,11 @@ public class HFileReplicator {
             FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf);
           } catch (FileNotFoundException e1) {
             // This will mean that the hfile does not exists any where in source cluster FS. So we
-            // cannot do anything here just log and return.
+            // cannot do anything here just log and continue.
             LOG.error("Failed to copy hfile from " + sourceHFilePath + " to " + localHFilePath
                 + ". Hence ignoring this hfile from replication..",
               e1);
-            return null;
+            continue;
           }
         }
         sinkFs.setPermission(localHFilePath, PERM_ALL_ACCESS);
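
The control-flow change is easiest to see in isolation: the catch block used to return, dropping every remaining hfile in the batch, while continue skips only the missing one. A self-contained sketch of the pattern follows; copyHFile and the file names are hypothetical stand-ins, not the HBase API:

import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.List;

public class SkipMissingHFileSketch {
  // Hypothetical stand-in for the FileUtil.copy call in HFileReplicator.
  static void copyHFile(String path) throws FileNotFoundException {
    if (path.contains("missing")) {
      throw new FileNotFoundException(path);
    }
    System.out.println("replicated " + path);
  }

  public static void main(String[] args) {
    List<String> batch = Arrays.asList("a.hfile", "missing.hfile", "c.hfile");
    for (String path : batch) {
      try {
        copyHFile(path);
      } catch (FileNotFoundException e) {
        // Before the fix this was effectively "return": c.hfile was never
        // replicated. Logging and continuing processes the rest of the batch.
        System.err.println("skipping " + path + ": " + e.getMessage());
        continue;
      }
    }
  }
}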



hbase git commit: HBASE-15668 HFileReplicator fails to replicate other hfiles in the request when a hfile is not found in FS anywhere

2016-04-18 Thread ashishsinghi
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 87d8c8863 -> 5fa78dd48


HBASE-15668 HFileReplicator fails to replicate other hfiles in the request when a hfile is not found in FS anywhere


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5fa78dd4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5fa78dd4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5fa78dd4

Branch: refs/heads/branch-1.3
Commit: 5fa78dd483e16a19a13096116aa63b6f35af0f79
Parents: 87d8c88
Author: Ashish Singhi 
Authored: Mon Apr 18 22:17:02 2016 +0530
Committer: Ashish Singhi 
Committed: Mon Apr 18 22:19:04 2016 +0530

--
 .../hadoop/hbase/replication/regionserver/HFileReplicator.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5fa78dd4/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
index 17f6780..1a1044d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
@@ -378,11 +378,11 @@ public class HFileReplicator {
             FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf);
           } catch (FileNotFoundException e1) {
             // This will mean that the hfile does not exists any where in source cluster FS. So we
-            // cannot do anything here just log and return.
+            // cannot do anything here just log and continue.
             LOG.error("Failed to copy hfile from " + sourceHFilePath + " to " + localHFilePath
                 + ". Hence ignoring this hfile from replication..",
               e1);
-            return null;
+            continue;
           }
         }
         sinkFs.setPermission(localHFilePath, PERM_ALL_ACCESS);



hbase git commit: HBASE-15668 HFileReplicator fails to replicate other hfiles in the request when a hfile is not found in FS anywhere

2016-04-18 Thread ashishsinghi
Repository: hbase
Updated Branches:
  refs/heads/branch-1 ee78b6da7 -> 6d40b7a0e


HBASE-15668 HFileReplicator fails to replicate other hfiles in the request when a hfile is not found in FS anywhere


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6d40b7a0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6d40b7a0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6d40b7a0

Branch: refs/heads/branch-1
Commit: 6d40b7a0e4b8fb0bb3ada214e790aaf496070989
Parents: ee78b6d
Author: Ashish Singhi 
Authored: Mon Apr 18 22:17:02 2016 +0530
Committer: Ashish Singhi 
Committed: Mon Apr 18 22:18:46 2016 +0530

--
 .../hadoop/hbase/replication/regionserver/HFileReplicator.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6d40b7a0/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
index 17f6780..1a1044d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
@@ -378,11 +378,11 @@ public class HFileReplicator {
             FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf);
           } catch (FileNotFoundException e1) {
             // This will mean that the hfile does not exists any where in source cluster FS. So we
-            // cannot do anything here just log and return.
+            // cannot do anything here just log and continue.
             LOG.error("Failed to copy hfile from " + sourceHFilePath + " to " + localHFilePath
                 + ". Hence ignoring this hfile from replication..",
               e1);
-            return null;
+            continue;
          }
         }
         sinkFs.setPermission(localHFilePath, PERM_ALL_ACCESS);



[20/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/Increment.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Increment.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Increment.html
index d086b0f..ee734b9 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html
index 7d9493d..f699071 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterKeepAliveConnection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterKeepAliveConnection.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterKeepAliveConnection.html
index e5a6d32..97fa2a6 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.CallStats.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.CallStats.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.CallStats.html
index 7e76f2c..bcd685e 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive and the message is truncated]
[13/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
index 256b142..6d94a1a 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive and the message is truncated]

[41/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 2d45ce3..ca15074 100644
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2016 The Apache Software Foundation
 
-  File: 1731,
- Errors: 12421,
+  File: 1732,
+ Errors: 12423,
   Warnings: 0,
   Infos: 0
[remaining per-file checkstyle entries omitted: markup was stripped by the archive and the message is truncated]

[35/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 47a7b22..b6bd440 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive and the message is truncated]

[45/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 1380982..227f913 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive and the message is truncated]

[08/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/io/class-use/FSDataInputStreamWrapper.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/FSDataInputStreamWrapper.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/FSDataInputStreamWrapper.html
index 44377b6..79fe9a4 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 6ad0fda..64d6d2b 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive and the message is truncated]

[27/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 260bca1..6345db2 100644
[generated Javadoc HTML hunks omitted: markup was stripped by the archive and the message is truncated]

[38/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index b07d226..51ea51b 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1281,6 +1281,8 @@
 
 Add a table coprocessor to this table.
 
+addCSRFFilter(Context,
 Configuration) - Static method in class 
org.apache.hadoop.hbase.rest.RESTServer
+ 
 addCurrentScanners(List) - Method in class 
org.apache.hadoop.hbase.regionserver.StoreScanner
  
 addDaughter(Connection,
 HRegionInfo, ServerName, long) - Static method in class 
org.apache.hadoop.hbase.MetaTableAccessor
@@ -5198,6 +5200,12 @@
  
 breakLines
 - Variable in class org.apache.hadoop.hbase.util.Base64.Base64OutputStream
  
+BROWSER_USER_AGENT_PARAM
 - Static variable in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+ 
+BROWSER_USER_AGENTS_DEFAULT
 - Static variable in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+ 
+browserUserAgents
 - Variable in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+ 
 BUCKET_CACHE_BUCKETS_KEY
 - Static variable in class org.apache.hadoop.hbase.io.hfile.CacheConfig
 
 A comma-delimited array of values for use as bucket 
sizes.
@@ -7610,6 +7618,8 @@
  
 cfVsMaxVersions
 - Variable in class org.apache.hadoop.hbase.security.visibility.VisibilityLabelFilter
  
+chain
 - Variable in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter.ServletFilterHttpInteraction
+ 
 ChainWALEntryFilter - Class in org.apache.hadoop.hbase.replication
 
 A WALEntryFilter which 
contains multiple filters and applies them
@@ -16632,6 +16642,10 @@
 
 CUSTOM_FILTERS
 - Static variable in interface org.apache.hadoop.hbase.rest.Constants
  
+CUSTOM_HEADER_PARAM
 - Static variable in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+ 
+CUSTOM_METHODS_TO_IGNORE_PARAM
 - Static variable in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+ 
 CUSTOM_RPC_CLIENT_IMPL_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.ipc.RpcClientFactory
  
 cyclePushSize
 - Variable in class org.apache.hadoop.hbase.replication.regionserver.ReplicationThrottler
@@ -18660,10 +18674,18 @@
 
 Send a DELETE request
 
+delete(String,
 Header) - Method in class org.apache.hadoop.hbase.rest.client.Client
+
+Send a DELETE request
+
 delete(Cluster,
 String) - Method in class org.apache.hadoop.hbase.rest.client.Client
 
 Send a DELETE request
 
+delete(Cluster,
 String, Header) - Method in class 
org.apache.hadoop.hbase.rest.client.Client
+
+Send a DELETE request
+
 delete(Delete)
 - Method in class org.apache.hadoop.hbase.rest.client.RemoteHTable
  
 delete(List)
 - Method in class org.apache.hadoop.hbase.rest.client.RemoteHTable
@@ -19514,6 +19536,8 @@
  
 destroy()
 - Method in class org.apache.hadoop.hbase.rest.filter.GzipFilter
  
+destroy()
 - Method in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+ 
 destroyDirectByteBuffer(ByteBuffer)
 - Static method in class org.apache.hadoop.hbase.util.DirectMemoryUtils
 
 DirectByteBuffers are garbage collected by using a phantom 
reference and a
@@ -19983,6 +20007,8 @@
  
 doFilter(ServletRequest,
 ServletResponse, FilterChain) - Method in class 
org.apache.hadoop.hbase.rest.filter.GzipFilter
  
+doFilter(ServletRequest,
 ServletResponse, FilterChain) - Method in class 
org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+ 
 doFilterRegionServerByName(Map>) - Method in class 
org.apache.hadoop.hbase.tool.Canary.RegionServerMonitor
  
 doFullAppendTransaction(WAL,
 NavigableMap, HRegionInfo, WALEdit, 
MultiVersionConcurrencyControl, boolean) - Static method in class 
org.apache.hadoop.hbase.regionserver.wal.WALUtil
@@ -30165,6 +30191,11 @@
 
 Returns the filter name given a simple filter 
expression
 
+getFilterParams(Configuration,
 String) - Static method in class 
org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter
+
+Constructs a mapping of configuration properties to be used 
for filter
+ initialization.
+
 getFilters()
 - Method in class org.apache.hadoop.hbase.filter.FilterList
 
 Get the filters.
@@ -30529,6 +30560,12 @@
  
 getHeader(String)
 - Method in class org.apache.hadoop.hbase.rest.client.Response
  
+getHeader(String)
 - Method in interface org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter.HttpInteraction
+
+Returns the value of a header.
+
+getHeader(String)
 - Method in class org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter.ServletFilterHttpInteraction
+ 
 getHeaderAndDataForTest()
 - Method in class org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer
 
 Returns the header or the compressed data (or uncompressed 
data when not
@@ -32167,6 +32204,8 @@
  
 getMax() 
- Method in class org.apache.hadoop.hbase.io.Tim
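
The index entries above cover the CSRF additions to the REST gateway: the RestCsrfPreventionFilter constants and fields, its doFilter/destroy lifecycle methods, and the new Client overloads such as delete(String, Header) that carry an extra Header. A minimal sketch of a caller satisfying the filter, assuming a local gateway on port 8080 and the filter's default custom-header name (both assumptions, not taken from the commit):

import org.apache.commons.httpclient.Header;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.Response;

public class CsrfAwareDelete {
  public static void main(String[] args) throws Exception {
    Cluster cluster = new Cluster();
    cluster.add("localhost", 8080);  // hypothetical gateway address
    Client client = new Client(cluster);
    // The extra-Header overload lets non-browser clients pass the CSRF
    // check, which rejects mutating requests lacking the custom header.
    Header csrf = new Header("X-XSRF-HEADER", "");  // assumed default name
    Response response = client.delete("/mytable/myrow", csrf);
    System.out.println("DELETE status: " + response.getCode());
  }
}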

[18/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index dd062e6..45d1e7a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -399,15 +399,15 @@ service.
 
 
 Result[]
-ScannerCallable.call(int callTimeout) 
+ScannerCallableWithReplicas.call(int timeout) 
 
 
 Result[]
-ScannerCallableWithReplicas.call(int timeout) 
+ClientSmallScanner.SmallScannerCallable.call(int timeout) 
 
 
 Result[]
-ClientSmallScanner.SmallScannerCallable.call(int timeout) 
+ScannerCallable.call(int callTimeout) 
 
 
 Result
@@ -527,11 +527,13 @@ service.
 
 
 Result
-ClientSmallScanner.next() 
+ResultScanner.next()
+Grab the next row's worth of values.
+
 
 
 Result
-ClientAsyncPrefetchScanner.next() 
+ClientSmallScanner.next() 
 
 
 Result
@@ -539,9 +541,7 @@ service.
 
 
 Result
-ResultScanner.next()
-Grab the next row's worth of values.
-
+ClientAsyncPrefetchScanner.next() 
 
 
 Result
@@ -557,13 +557,13 @@ service.
 
 
 Result[]
-AbstractClientScanner.next(int nbRows)
-Get nbRows rows.
-
+ResultScanner.next(int nbRows) 
 
 
 Result[]
-ResultScanner.next(int nbRows) 
+AbstractClientScanner.next(int nbRows)
+Get nbRows rows.
+
 
 
 protected Result
@@ -715,25 +715,19 @@ service.
 
 
 Result
-BaseRegionObserver.postAppend(ObserverContext e,
-Append append,
-Result result) 
-
-
-Result
 RegionObserver.postAppend(ObserverContext c,
 Append append,
 Result result)
 Called after Append
 
 
-
+
 Result
-BaseRegionObserver.postIncrement(ObserverContext e,
-  Increment increment,
-  Result result) 
+BaseRegionObserver.postAppend(ObserverContext e,
+Append append,
+Result result) 
 
-
+
 Result
 RegionObserver.postIncrement(ObserverContext c,
   Increment increment,
@@ -741,54 +735,60 @@ service.
 Called after increment
 
 
-
+
 Result
-BaseRegionObserver.preAppend(ObserverContext e,
-  Append append) 
+BaseRegionObserver.postIncrement(ObserverContext e,
+  Increment increment,
+  Result result) 
 
-
+
 Result
 RegionObserver.preAppend(ObserverContext c,
   Append append)
 Called before Append.
 
 
-
+
 Result
-BaseRegionObserver.preAppendAfterRowLock(ObserverContext e,
-  Append append) 
+BaseRegionObserver.preAppend(ObserverContext e,
+  Append append) 
 
-
+
 Result
 RegionObserver.preAppendAfterRowLock(ObserverContext c,
   Append append)
 Called before Append but after acquiring rowlock.
 
 
-
+
 Result
-BaseRegionObserver.preIncrement(ObserverContext e,
-Increment increment) 
+BaseRegionObserver.preAppendAfterRowLock(ObserverContext e,
+  Append append) 
 
-
+
 Result
 RegionObserver.preIncrement(ObserverContext c,
 Increment increment)
 Called before Increment.
 
 
-
+
 Result
-BaseRegionObserver.preIncrementAfterRowLock(ObserverContext e,
-Increment increment) 
+BaseRegionObserver.preIncrement(ObserverContext e,
+Increment increment) 
 
-
+
 Result
 RegionObserver.preIncrementAfterRowLock(ObserverContext c,
 Increment increment)
 Called before Increment but after acquiring rowlock.
 
 
+
+Result
+BaseRegionObserver.preIncrementAfterRowLock(ObserverContext e,
+Increment increment) 
+
 
 
 
@@ -800,25 +800,19 @@ service.
 
 
 Result
-BaseRegionObserver.postAppend(ObserverContext e,
-Append append,
-Result result) 
-
-
-Result
 RegionObserver.postAppend(ObserverContext c,
 Append append,
 Result result)
 Called after Append
 
 
-
+
 Result
-BaseRegionObserver.postIncrement
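
The Result-typed rows above are the coprocessor hooks that bracket the atomic read-modify-write operations: preAppend, preIncrement, their *AfterRowLock variants, and the post* callbacks. A minimal observer sketch against the BaseRegionObserver API shown in this diff; the class name and logic are illustrative only:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

public class AuditingObserver extends BaseRegionObserver {
  @Override
  public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> e,
      Append append) throws IOException {
    // Returning null (without calling e.bypass()) lets the normal append
    // path run; a non-null Result would be handed back to the client.
    return null;
  }

  @Override
  public Result postIncrement(ObserverContext<RegionCoprocessorEnvironment> e,
      Increment increment, Result result) throws IOException {
    // Observe, but do not alter, the increment's outcome.
    return result;
  }
}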

[30/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 4ac5fdf..b24a409 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -529,76 +529,68 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor[]
-Admin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
+HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Delete tables matching the passed in pattern and wait on 
completion.
 
 
 
 HTableDescriptor[]
-HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
+Admin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Delete tables matching the passed in pattern and wait on 
completion.
 
 
 
 HTableDescriptor[]
+HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
+
+
+HTableDescriptor[]
 Admin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String regex)
 Deletes tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
+HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
 
-
+
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Disable tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
+HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
 
-
+
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String regex)
 Disable tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
+HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
 
-
+
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Enable tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
+HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
 
-
+
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String regex)
 Enable tables matching the passed in pattern and wait on 
completion.
 
 
-
-HTableDescriptor[]
-HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
-
 
 HTableDescriptor
-HConnection.getHTableDescriptor(byte[] tableName)
-Deprecated. 
-internal method, do not use through HConnection
-
-
-
-
-HTableDescriptor
 ConnectionImplementation.getHTableDescriptor(byte[] tableName)
 Deprecated. 
 Use Admin.getTableDescriptor(org.apache.hadoop.hbase.TableName
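
The Admin/HBaseAdmin rows above cover the pattern-based bulk operations deleteTables, disableTables and enableTables. A short sketch of how a client might drop every table matching a regex; the "tmp_" prefix is a made-up example, and note these methods return the descriptors of tables the operation could not process:

import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class DropTempTables {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      Pattern tmp = Pattern.compile("tmp_.*");
      admin.disableTables(tmp);  // tables must be disabled before deletion
      HTableDescriptor[] failed = admin.deleteTables(tmp);
      System.out.println(failed.length + " matching tables could not be deleted");
    }
  }
}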

[48/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/client/package-tree.html
index f7809ed..451b0b6 100644
--- a/apidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -202,11 +202,11 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.client.Durability
+org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.IsolationLevel
 org.apache.hadoop.hbase.client.Admin.CompactType
 org.apache.hadoop.hbase.client.Admin.MasterSwitchType
-org.apache.hadoop.hbase.client.IsolationLevel
-org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.Durability
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html 
b/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index 9887764..650588b 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -306,7 +306,7 @@ the order they are declared.
 
 
 values
-public static CompareFilter.CompareOp[] values()
+public static CompareFilter.CompareOp[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -323,7 +323,7 @@ for (CompareFilter.CompareOp c : 
CompareFilter.CompareOp.values())
 
 
 valueOf
-public static CompareFilter.CompareOp valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static CompareFilter.CompareOp valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 931046f..6c98f44 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -107,107 +107,107 @@
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+SingleColumnValueFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+SkipFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+FuzzyRowFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
-Deprecated. 
- 
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c) 
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c) 
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+RowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v) 
+ColumnPrefixFilte
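
The CompareFilter.CompareOp page above restates the standard enum contract: values() returns the constants in declaration order, and valueOf(String) resolves an exact identifier or throws IllegalArgumentException. A tiny self-contained illustration:

import org.apache.hadoop.hbase.filter.CompareFilter;

public class ListCompareOps {
  public static void main(String[] args) {
    // Iterate the constants in the order they are declared.
    for (CompareFilter.CompareOp op : CompareFilter.CompareOp.values()) {
      System.out.println(op.name());
    }
    // valueOf requires the exact identifier used in the enum declaration.
    CompareFilter.CompareOp eq = CompareFilter.CompareOp.valueOf("EQUAL");
    System.out.println("Parsed: " + eq);
  }
}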

[32/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index 68116ad..14781e3 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -863,15 +863,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-ScannerCallable.getHRegionInfo() 
+ScannerCallableWithReplicas.getHRegionInfo() 
 
 
 HRegionInfo
-MultiServerCallable.getHRegionInfo() 
+ScannerCallable.getHRegionInfo() 
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo() 
+MultiServerCallable.getHRegionInfo() 
 
 
 HRegionInfo
@@ -905,13 +905,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-Admin.getOnlineRegions(ServerName sn)
-Get all the online regions on a region server.
-
+HBaseAdmin.getOnlineRegions(ServerName sn) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HBaseAdmin.getOnlineRegions(ServerName sn) 
+Admin.getOnlineRegions(ServerName sn)
+Get all the online regions on a region server.
+
 
 
 (package private) Pair
@@ -919,13 +919,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-Admin.getTableRegions(TableName tableName)
-Get the regions of a given table.
-
+HBaseAdmin.getTableRegions(TableName tableName) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HBaseAdmin.getTableRegions(TableName tableName) 
+Admin.getTableRegions(TableName tableName)
+Get the regions of a given table.
+
 
 
 
@@ -944,16 +944,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
+HBaseAdmin.closeRegion(ServerName sn,
+  HRegionInfo hri) 
+
+
+void
 Admin.closeRegion(ServerName sn,
   HRegionInfo hri)
 Close a region.
 
 
-
-void
-HBaseAdmin.closeRegion(ServerName sn,
-  HRegionInfo hri) 
-
 
 private void
 HBaseAdmin.compact(ServerName sn,
@@ -1113,6 +1113,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
+BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
+HRegionInfo regionInfo) 
+
+
+void
+BaseMasterObserver.postAssign(ObserverContext ctx,
+HRegionInfo regionInfo) 
+
+
+void
 MasterObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo)
 Called after the region assignment has been requested.
@@ -1120,13 +1130,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
-HRegionInfo regionInfo) 
+BaseMasterAndRegionObserver.postCreateTable(ObserverContext ctx,
+  HTableDescriptor desc,
+  HRegionInfo[] regions) 
 
 
 void
-BaseMasterObserver.postAssign(ObserverContext ctx,
-HRegionInfo regionInfo) 
+BaseMasterObserver.postCreateTable(ObserverContext ctx,
+  HTableDescriptor desc,
+  HRegionInfo[] regions) 
 
 
 void
@@ -1138,15 +1150,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postCreateTable(ObserverContext ctx,
-  HTableDescriptor desc,
-  HRegionInfo[] regions) 
+BaseMasterAndRegionObserver.postCreateTableHandler(ObserverContext ctx,
+HTableDescriptor desc,
+HRegionInfo[] regions) 
 
 
 void
-BaseMasterObserver.postCreateTable(ObserverContext ctx,
-  HTableDescriptor desc,
-  HRegionInfo[] regions) 
+BaseMasterObserver.postCreateTableHandler(ObserverContext ctx,
+HTableDescriptor desc,
+

[12/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.MutationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.MutationType.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.MutationType.html
index 29fee71..1c4a4db 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.MutationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.MutationType.html
@@ -132,22 +132,22 @@ the order they are declared.
 
 
 Cell
-BaseRegionObserver.postMutationBeforeWAL(ObserverContext ctx,
+RegionObserver.postMutationBeforeWAL(ObserverContext ctx,
   RegionObserver.MutationType opType,
   Mutation mutation,
   Cell oldCell,
-  Cell newCell) 
+  Cell newCell)
+Called after a new cell has been created during an 
increment operation, but before
+ it is committed to the WAL or memstore.
+
 
 
 Cell
-RegionObserver.postMutationBeforeWAL(ObserverContext ctx,
+BaseRegionObserver.postMutationBeforeWAL(ObserverContext ctx,
   RegionObserver.MutationType opType,
   Mutation mutation,
   Cell oldCell,
-  Cell newCell)
-Called after a new cell has been created during an 
increment operation, but before
- it is committed to the WAL or memstore.
-
+  Cell newCell) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
index f76650a..a6609dd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
@@ -110,24 +110,17 @@
 
 
 ReplicationEndpoint
-BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
-  ReplicationEndpoint endpoint) 
-
-
-ReplicationEndpoint
 RegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
   ReplicationEndpoint endpoint)
 This will be called after the replication endpoint is 
instantiated.
 
 
-
-void
-BaseRegionServerObserver.postMerge(ObserverContext c,
-  Region regionA,
-  Region regionB,
-  Region mergedRegion) 
-
 
+ReplicationEndpoint
+BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
+  ReplicationEndpoint endpoint) 
+
+
 void
 RegionServerObserver.postMerge(ObserverContext c,
   Region regionA,
@@ -136,14 +129,14 @@
 called after the regions merge.
 
 
-
+
 void
-BaseRegionServerObserver.postMergeCommit(ObserverContext ctx,
-  Region regionA,
-  Region regionB,
-  Region mergedRegion) 
+BaseRegionServerObserver.postMerge(ObserverContext c,
+  Region regionA,
+  Region regionB,
+  Region mergedRegion) 
 
-
+
 void
 RegionServerObserver.postMergeCommit(ObserverContext ctx,
   Region regionA,
@@ -152,13 +145,14 @@
 This will be called after PONR step as part of regions 
merge transaction.
 
 
-
+
 void
-BaseRegionServerObserver.postReplicateLogEntries(ObserverContext ctx,
-  http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List entries,
-  CellScanner cells) 
+BaseRegionServerObser
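
The RegionServerObserver rows above include the region-merge lifecycle hooks. A minimal sketch of an observer that only logs completed merges, using the postMerge signature shown in the diff; the class name is illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.coprocessor.BaseRegionServerObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.Region;

public class MergeLogger extends BaseRegionServerObserver {
  @Override
  public void postMerge(ObserverContext<RegionServerCoprocessorEnvironment> c,
      Region regionA, Region regionB, Region mergedRegion) throws IOException {
    // Runs on the region server after the two parent regions are merged.
    System.out.println("Merged "
        + regionA.getRegionInfo().getRegionNameAsString() + " and "
        + regionB.getRegionInfo().getRegionNameAsString() + " into "
        + mergedRegion.getRegionInfo().getRegionNameAsString());
  }
}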

[43/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Client.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Client.html 
b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Client.html
index 4e979be..ebac866 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Client.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Client.html
@@ -383,31 +383,31 @@
 375
 376  /**
 377   * Send a PUT request
-378   * @param cluster the cluster 
definition
-379   * @param path the path or URI
-380   * @param contentType the content MIME 
type
-381   * @param content the content bytes
+378   * @param path the path or URI
+379   * @param contentType the content MIME 
type
+380   * @param content the content bytes
+381   * @param extraHdr extra Header to 
send
 382   * @return a Response object with 
response detail
 383   * @throws IOException
 384   */
-385  public Response put(Cluster cluster, 
String path, String contentType, 
-386  byte[] content) throws IOException 
{
-387Header[] headers = new Header[1];
-388headers[0] = new 
Header("Content-Type", contentType);
-389return put(cluster, path, headers, 
content);
-390  }
-391
-392  /**
-393   * Send a PUT request
-394   * @param path the path or URI
-395   * @param headers the HTTP headers to 
include, Content-Type must be
-396   * supplied
-397   * @param content the content bytes
-398   * @return a Response object with 
response detail
-399   * @throws IOException
-400   */
-401  public Response put(String path, 
Header[] headers, byte[] content) 
-402  throws IOException {
+385  public Response put(String path, String 
contentType, byte[] content, Header extraHdr)
+386  throws IOException {
+387return put(cluster, path, 
contentType, content, extraHdr);
+388  }
+389
+390  /**
+391   * Send a PUT request
+392   * @param cluster the cluster 
definition
+393   * @param path the path or URI
+394   * @param contentType the content MIME 
type
+395   * @param content the content bytes
+396   * @return a Response object with 
response detail
+397   * @throws IOException for error
+398   */
+399  public Response put(Cluster cluster, 
String path, String contentType, 
+400  byte[] content) throws IOException 
{
+401Header[] headers = new Header[1];
+402headers[0] = new 
Header("Content-Type", contentType);
 403return put(cluster, path, headers, 
content);
 404  }
 405
@@ -415,122 +415,223 @@
 407   * Send a PUT request
 408   * @param cluster the cluster 
definition
 409   * @param path the path or URI
-410   * @param headers the HTTP headers to 
include, Content-Type must be
-411   * supplied
-412   * @param content the content bytes
+410   * @param contentType the content MIME 
type
+411   * @param content the content bytes
+412   * @param extraHdr additional Header to 
send
 413   * @return a Response object with 
response detail
-414   * @throws IOException
+414   * @throws IOException for error
 415   */
-416  public Response put(Cluster cluster, 
String path, Header[] headers, 
-417  byte[] content) throws IOException 
{
-418PutMethod method = new PutMethod();
-419try {
-420  method.setRequestEntity(new 
ByteArrayRequestEntity(content));
-421  int code = execute(cluster, method, 
headers, path);
-422  headers = 
method.getResponseHeaders();
-423  content = 
method.getResponseBody();
-424  return new Response(code, headers, 
content);
-425} finally {
-426  method.releaseConnection();
-427}
-428  }
-429
-430  /**
-431   * Send a POST request
-432   * @param path the path or URI
-433   * @param contentType the content MIME 
type
-434   * @param content the content bytes
-435   * @return a Response object with 
response detail
-436   * @throws IOException
-437   */
-438  public Response post(String path, 
String contentType, byte[] content)
-439  throws IOException {
-440return post(cluster, path, 
contentType, content);
-441  }
-442
-443  /**
-444   * Send a POST request
-445   * @param cluster the cluster 
definition
-446   * @param path the path or URI
-447   * @param contentType the content MIME 
type
-448   * @param content the content bytes
-449   * @return a Response object with 
response detail
-450   * @throws IOException
-451   */
-452  public Response post(Cluster cluster, 
String path, String contentType, 
-453  byte[] content) throws IOException 
{
-454Header[] headers = new Header[1];
-455headers[0] = new 
Header("Content-Type", contentType);
-456return post(cluster, path, headers, 
content);
-457  }
-458
-459  /**
-460   * Send a POST request
-461   * @param path the path or URI
-462   * @param headers the HTTP headers to 
include, Content-Type must be
-463   * supplied
-464   * @param content the content bytes
-465   * @return a Response ob
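
The Client.java source diff above adds overloads such as put(String, String, byte[], Header) that forward an extra Header alongside the Content-Type. A hedged usage sketch, again assuming a gateway on localhost:8080 and the CSRF filter's default header name (assumptions, not from the commit):

import org.apache.commons.httpclient.Header;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.Response;
import org.apache.hadoop.hbase.util.Bytes;

public class CsrfAwarePut {
  public static void main(String[] args) throws Exception {
    Client client = new Client(new Cluster().add("localhost", 8080));
    Header csrf = new Header("X-XSRF-HEADER", "");  // assumed default name
    // Writes a single cell via the REST row endpoint (illustrative path).
    Response r = client.put("/mytable/row1/cf:q", "application/octet-stream",
        Bytes.toBytes("value"), csrf);
    System.out.println("PUT status: " + r.getCode());
  }
}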

[31/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 023be44..442ab80 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -266,11 +266,11 @@ service.
 
 
 protected HRegionLocation
-RegionServerCallable.location 
+RegionAdminServiceCallable.location 
 
 
 protected HRegionLocation
-RegionAdminServiceCallable.location 
+RegionServerCallable.location 
 
 
 
@@ -310,57 +310,47 @@ service.
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row)
+RegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row)
+HRegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row,
+RegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row,
+HRegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HConnection.getRegionLocation(byte[] tableName,
-  byte[] row,
-  boolean reload)
-Deprecated. 
-internal method, do not use through HConnection
-
-
-
-
-HRegionLocation
 ConnectionImplementation.getRegionLocation(byte[] tableName,
   byte[] row,
   boolean reload) 
 
-
+
 HRegionLocation
-HConnection.getRegionLocation(TableName tableName,
+HConnection.getRegionLocation(byte[] tableName,
   byte[] row,
   boolean reload)
 Deprecated. 
-internal method, do not use thru HConnection
+internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ClusterConnection.getRegionLocation(TableName tableName,
   byte[] row,
@@ -368,58 +358,59 @@ service.
 Find region location hosting passed row
 
 
-
+
 HRegionLocation
 ConnectionImplementation.getRegionLocation(TableName tableName,
   byte[] row,
   boolean reload) 
 
-
-private HRegionLocation
-AsyncProcess.AsyncRequestFutureImpl.getReplicaLocationOrFail(Action action) 
-
 
 HRegionLocation
-HConnection.locateRegion(byte[] regionName)
+HConnection.getRegionLocation(TableName tableName,
+  byte[] row,
+  boolean reload)
 Deprecated. 
 internal method, do not use thru HConnection
 
 
 
 
+private HRegionLocation
+AsyncProcess.AsyncRequestFutureImpl.getReplicaLocationOrFail(Action action) 
+
+
 HRegionLocation
 ClusterConnection.locateRegion(byte[] regionName)
 Gets the location of the region of regionName.
 
 
-
+
 HRegionLocation
 ConnectionImplementation.locateRegion(byte[] regionName) 
 
-
+
 HRegionLocation
-HConnection.locateRegion(byte[] tableName,
-byte[] row)
+HConnection.locateRegion(byte[] regionName)
 Deprecated. 
-internal method, do not use through HConnection
+internal method, do not use thru HConnection
 
 
 
-
+
 HRegionLocation
 ConnectionImplementation.locateRegion(byte[] tableName,
 byte[] row) 
 
-
+
 HRegionLocation
-HConnection.locateRegion(TableName tableName,
+HConnection.locateRegion(byte[] tableName,
 byte[] row)
 Deprecated. 
 internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(TableName tableName,
 byte[] row)
@@ -427,35 +418,35 @@ service.
  lives in.
 
 
-
+
 HRegionLocation
 ConnectionImplementation.locateRegion(TableName tableName,
 byte[] row) 
 
-
+
 HRegionLocation
-HConnection.relocateRegion(byte[] tableName,
-byte[] row)
+HConnection.locateRegion(TableName tableName,
+byte[] row)
 Deprecated. 
 internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ConnectionImplementation.relocateRegion(byte[] tableName,
 byte[] row) 
 
-
+
 HRegionLocation
-HConnection.relocateRegion(TableName tableName,
+HConnection.relocateRegion(byte[] tableName,
 byte[] row)
 Deprecated. 
 internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ClusterConnection.relocateRegion(TableName tableNam
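
The HRegionLocation uses above center on RegionLocator.getRegionLocation(byte[], boolean), the supported replacement for the deprecated HConnection lookups. A short sketch; the table and row are illustrative:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;

public class WhereIsMyRow {
  public static void main(String[] args) throws Exception {
    try (Connection conn =
             ConnectionFactory.createConnection(HBaseConfiguration.create());
         RegionLocator locator =
             conn.getRegionLocator(TableName.valueOf("mytable"))) {
      // reload=false serves from the client's location cache when possible.
      HRegionLocation loc =
          locator.getRegionLocation(Bytes.toBytes("row-42"), false);
      System.out.println(loc.getRegionInfo().getRegionNameAsString()
          + " on " + loc.getServerName());
    }
  }
}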

[47/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index eac7032..8b60247 100644
--- a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -150,11 +150,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableRecordReaderImpl.createKey() 
 
 
 ImmutableBytesWritable
-TableRecordReaderImpl.createKey() 
+TableRecordReader.createKey() 
 
 
 
@@ -167,23 +167,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
+  
org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
   
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+  
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 
@@ -220,12 +220,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableRecordReaderImpl.next(ImmutableBytesWritable key,
 Result value) 
 
 
 boolean
-TableRecordReaderImpl.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -295,13 +295,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.getCurrentKey()
+TableRecordReaderImpl.getCurrentKey()
 Returns the current key.
 
 
 
 ImmutableBytesWritable
-TableRecordReaderImpl.getCurrentKey()
+TableRecordReader.getCurrentKey()
 Returns the current key.
 
 
@@ -316,8 +316,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReader
-TableSnapshotInputFormat.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
-
org.apache.hadoop.mapreduce.TaskAttemptContext context) 
+TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
+
org.apache.hadoop.mapreduce.TaskAttemptContext context)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapreduce.RecordReader
@@ -328,19 +330,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReader
-TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
-
org.apache.hadoop.mapreduce.TaskAttemptContext context)
-Builds a TableRecordReader.
-
+TableSnapshotInputFormat.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
+
org.apache.hadoop.mapreduce.TaskAttemptContext context) 
 
 
-org.apache.hadoop.mapreduce.RecordWriter
-MultiTableOutputFormat.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) 
-
-
 org.apache.hadoop.mapreduce.RecordWriter
 HFileOutputFormat2.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) 
 
+
+org.apache.hadoop.mapreduce.RecordWriter
+MultiTableOutputFormat.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) 
+
 
 
 
@@ -352,12 +352,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 int
-SimpleTotalOrderPartitioner.getPartition(ImmutableBy
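
The ImmutableBytesWritable uses above are the glue of the MapReduce integration: the table record readers hand each mapper a (row key, Result) pair. A minimal TableMapper sketch; the counter names are illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;

public class RowCountMapper
    extends TableMapper<ImmutableBytesWritable, Result> {
  @Override
  protected void map(ImmutableBytesWritable rowKey, Result columns,
      Context context) throws IOException, InterruptedException {
    // Each invocation receives one row's worth of cells; just count rows.
    context.getCounter("stats", "rows").increment(1);
  }
}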

[11/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 8d2fbd1..0de9040 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -144,20 +144,20 @@
 
 
 
-static ClusterId
-ClusterId.parseFrom(byte[] bytes) 
+static HRegionInfo
+HRegionInfo.parseFrom(byte[] bytes) 
 
 
-static HColumnDescriptor
-HColumnDescriptor.parseFrom(byte[] bytes) 
-
-
 static HTableDescriptor
 HTableDescriptor.parseFrom(byte[] bytes) 
 
+
+static ClusterId
+ClusterId.parseFrom(byte[] bytes) 
+
 
-static HRegionInfo
-HRegionInfo.parseFrom(byte[] bytes) 
+static HColumnDescriptor
+HColumnDescriptor.parseFrom(byte[] bytes) 
 
 
 static TableDescriptor
@@ -257,146 +257,146 @@
 ByteArrayComparable.parseFrom(byte[] pbBytes) 
 
 
-static BinaryPrefixComparator
-BinaryPrefixComparator.parseFrom(byte[] pbBytes) 
+static SingleColumnValueFilter
+SingleColumnValueFilter.parseFrom(byte[] pbBytes) 
 
 
-static FuzzyRowFilter
-FuzzyRowFilter.parseFrom(byte[] pbBytes) 
+static QualifierFilter
+QualifierFilter.parseFrom(byte[] pbBytes) 
 
 
-static BitComparator
-BitComparator.parseFrom(byte[] pbBytes) 
+static LongComparator
+LongComparator.parseFrom(byte[] pbBytes) 
 
 
-static MultipleColumnPrefixFilter
-MultipleColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static FamilyFilter
+FamilyFilter.parseFrom(byte[] pbBytes) 
 
 
-static RowFilter
-RowFilter.parseFrom(byte[] pbBytes) 
+static ColumnRangeFilter
+ColumnRangeFilter.parseFrom(byte[] pbBytes) 
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[] pbBytes) 
+static ColumnCountGetFilter
+ColumnCountGetFilter.parseFrom(byte[] pbBytes) 
 
 
-static SkipFilter
-SkipFilter.parseFrom(byte[] pbBytes) 
+static DependentColumnFilter
+DependentColumnFilter.parseFrom(byte[] pbBytes) 
 
 
-static PrefixFilter
-PrefixFilter.parseFrom(byte[] pbBytes) 
+static BinaryComparator
+BinaryComparator.parseFrom(byte[] pbBytes) 
 
 
-static FilterList
-FilterList.parseFrom(byte[] pbBytes) 
+static ColumnPrefixFilter
+ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
 
 
-static SubstringComparator
-SubstringComparator.parseFrom(byte[] pbBytes) 
+static FilterList
+FilterList.parseFrom(byte[] pbBytes) 
 
 
+static FilterWrapper
+FilterWrapper.parseFrom(byte[] pbBytes) 
+
+
 static Filter
 Filter.parseFrom(byte[] pbBytes)
 Concrete implementers can signal a failure condition in 
their code by throwing an
  http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException.
 
 
-
-static DependentColumnFilter
-DependentColumnFilter.parseFrom(byte[] pbBytes) 
-
 
-static SingleColumnValueFilter
-SingleColumnValueFilter.parseFrom(byte[] pbBytes) 
+static WhileMatchFilter
+WhileMatchFilter.parseFrom(byte[] pbBytes) 
 
 
-static InclusiveStopFilter
-InclusiveStopFilter.parseFrom(byte[] pbBytes) 
+static ValueFilter
+ValueFilter.parseFrom(byte[] pbBytes) 
 
 
-static BinaryComparator
-BinaryComparator.parseFrom(byte[] pbBytes) 
+static RowFilter
+RowFilter.parseFrom(byte[] pbBytes) 
 
 
-static TimestampsFilter
-TimestampsFilter.parseFrom(byte[] pbBytes) 
+static InclusiveStopFilter
+InclusiveStopFilter.parseFrom(byte[] pbBytes) 
 
 
-static MultiRowRangeFilter
-MultiRowRangeFilter.parseFrom(byte[] pbBytes) 
+static SingleColumnValueExcludeFilter
+SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnPaginationFilter
-ColumnPaginationFilter.parseFrom(byte[] pbBytes) 
+static PageFilter
+PageFilter.parseFrom(byte[] pbBytes) 
 
 
-static LongComparator
-LongComparator.parseFrom(byte[] pbBytes) 
+static TimestampsFilter
+TimestampsFilter.parseFrom(byte[] pbBytes) 
 
 
-static WhileMatchFilter
-WhileMatchFilter.parseFrom(byte[] pbBytes) 
+static FirstKeyValueMatchingQualifiersFilter
+FirstKeyValueMatchingQualifiersFilter.parseFrom(byte[] pbBytes)
+Deprecated. 
+ 
 
 
-static PageFilter
-PageFilter.parseFrom(byte[] pbBytes) 
+static FirstKeyOnlyFilter
+FirstKeyOnlyFilter.parseFrom(byte[] pbBytes) 
 
 
-static SingleColumnValueExcludeFilter
-SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes) 
+static FuzzyRowFilter
+FuzzyRowFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static SubstringComparator
+SubstringComparator.parseFrom(byte[] pbBytes) 
 
 
-static FirstKeyValueMatchingQualifiersFilter
-FirstKeyValueMatchingQualifiersFilter.parseFrom(byte[] pbBytes)
-Deprecated. 
- 
+static RandomRowFilter
+RandomRowFilter.parseFrom(byte[] pbBytes) 
 
 
-static FilterWrapper
-FilterWrapper
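
Every parseFrom(byte[]) listed above is the protobuf deserialization half of a filter or comparator, and DeserializationException is how corrupt bytes surface. A small round-trip sketch using PageFilter:

import org.apache.hadoop.hbase.filter.PageFilter;

public class FilterRoundTrip {
  public static void main(String[] args) throws Exception {
    PageFilter original = new PageFilter(25);
    byte[] wire = original.toByteArray();  // protobuf-encoded form
    // parseFrom signals malformed input by throwing DeserializationException.
    PageFilter copy = PageFilter.parseFrom(wire);
    System.out.println("Round-tripped page size: " + copy.getPageSize());
  }
}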

[50/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 2abbccd..a5333e3 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -913,23 +913,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Append
-Append.add(Cell cell)
-Add column and value to this Append operation.
-
-
-
 Increment
 Increment.add(Cell cell)
 Add the specified KeyValue to this operation.
 
 
-
+
 Put
 Put.add(Cell kv)
 Add the specified KeyValue to this Put operation.
 
 
+
+Append
+Append.add(Cell cell)
+Add column and value to this Append operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1007,27 +1007,27 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 boolean partial) 
 
 
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Increment
 Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
-
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
 Delete
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
+
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
 
 
@@ -1044,30 +1044,30 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-ColumnRangeFilter.getNextCellHint(Cell cell) 
+FuzzyRowFilter.getNextCellHint(Cell currentCell) 
 
 
-abstract Cell
-Filter.getNextCellHint(Cell currentCell)
-If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
- the next key it must seek to.
-
+Cell
+ColumnPaginationFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-ColumnPaginationFilter.getNextCellHint(Cell cell) 
+ColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-ColumnPrefixFilter.getNextCellHint(Cell cell) 
+ColumnRangeFilter.getNextCellHint(Cell cell) 
 
 
-Cell
-FilterList.getNextCellHint(Cell currentCell) 
+abstract Cell
+Filter.getNextCellHint(Cell currentCell)
+If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
+ the next key it must seek to.
+
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
+FilterList.getNextCellHint(Cell currentCell) 
 
 
 Cell
@@ -1077,21 +1077,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-FuzzyRowFilter.getNextCellHint(Cell currentCell) 
-
-
-Cell
 MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
 
-
-abstract Cell
-Filter.transformCell(Cell v)
-Give the filter a chance to transform the passed 
KeyValue.
-
-
 
 Cell
-FilterList.transformCell(Cell c) 
+MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
 Cell
@@ -1102,9 +1092,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 KeyOnlyFilter.transformCel
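
The Cell uses above include the add(Cell) mutators on Append, Increment and Put. A brief sketch building an Append from a pre-built Cell; row, family and qualifier are illustrative, and the cell's row must match the operation's row:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.util.Bytes;

public class BuildAppend {
  public static void main(String[] args) throws Exception {
    byte[] row = Bytes.toBytes("row1");
    Cell cell = new KeyValue(row, Bytes.toBytes("cf"), Bytes.toBytes("log"),
        Bytes.toBytes("|suffix"));
    Append append = new Append(row).add(cell);
    // Against a live cluster this would be submitted via Table.append(append).
    System.out.println("cells queued: " + append.size());
  }
}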

[02/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcScheduler.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcScheduler.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcScheduler.html
index ed48dc4..839c54c 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcScheduler.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcScheduler.html
@@ -141,11 +141,11 @@
 
 
 RpcScheduler
-RpcServerInterface.getScheduler() 
+RpcServer.getScheduler() 
 
 
 RpcScheduler
-RpcServer.getScheduler() 
+RpcServerInterface.getScheduler() 
 
 
 
@@ -199,31 +199,31 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority)
 Deprecated. 
 
 
 
 RpcScheduler
-SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority)
 Deprecated. 
 
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority,
-Abortable server)
-Constructs a RpcScheduler.
-
+Abortable server) 
 
 
 RpcScheduler
-SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority,
-Abortable server) 
+Abortable server)
+Constructs a RpcScheduler.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
index 0d42775..97c8c1a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
@@ -162,13 +162,13 @@
 
 
 RpcServerInterface
-RegionServerServices.getRpcServer()
-Returns a reference to the region server's RPC server
-
+HRegionServer.getRpcServer() 
 
 
 RpcServerInterface
-HRegionServer.getRpcServer() 
+RegionServerServices.getRpcServer()
+Returns a reference to the region server's RPC server
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
index 29e8d2a..e8fd5ee 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
@@ -98,13 +98,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-private CellCreator
-TextSortReducer.kvCreator 
-
-
 protected CellCreator
 TsvImporterMapper.kvCreator 
 
+
+private CellCreator
+TextSortReducer.kvCreator 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/ImportTsv.TsvParser.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/ImportTsv.TsvParser.html
 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/ImportTsv.TsvParser.html
index 8fa8eb1..4ce31db 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/ImportTsv.TsvParser.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/ImportTsv.TsvParser.html
@@ -102,13 +102,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 TsvImporterTextMapper.parser 
 
 
-private ImportTsv.TsvParser
-TextSortReducer.parser 
-
-
 protected ImportTsv.TsvParser
 TsvImporterMapper.parser 
 
+
+private ImportTsv.TsvParser
+TextSortReducer.parser 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index cac2de0..b3f6c5b 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -267,10 +267,10 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/
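
The RpcScheduler rows above show the pluggable-factory seam: RpcSchedulerFactory.create, with SimpleRpcSchedulerFactory as the stock implementation. The factory is chosen by configuration; a sketch of that wiring, where the key name is an assumption to verify against your release:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class SchedulerFactoryConfig {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Assumed config key for swapping the region server's RPC scheduler;
    // point it at any class implementing RpcSchedulerFactory.
    conf.set("hbase.region.server.rpc.scheduler.factory.class",
        "org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory");
    System.out.println(
        conf.get("hbase.region.server.rpc.scheduler.factory.class"));
  }
}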

[01/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 3c6f3528e -> 9f9a078f0


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html 
b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
index fd34c77..4717912 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -105,7 +105,7 @@
 
 
 @InterfaceAudience.Private
-public class MasterRpcServices
+public class MasterRpcServices
 extends RSRpcServices
 implements 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService.BlockingInterface,
 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface
 Implements the master RPC services.
@@ -503,81 +503,86 @@ implements 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService
   
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest request) 
 
 
+org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ReleaseSplitOrMergeLockAndRollbackResponse
+releaseSplitOrMergeLockAndRollback(com.google.protobuf.RpcController controller,
+
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ReleaseSplitOrMergeLockAndRollbackRequest request) 
+
+
 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse
 reportRegionStateTransition(com.google.protobuf.RpcController c,
   
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest req) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse
 reportRSFatalError(com.google.protobuf.RpcController controller,
 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse
 restoreSnapshot(com.google.protobuf.RpcController controller,
   
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request)
 Execute Restore/Clone snapshot operation.
 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse
 runCatalogScan(com.google.protobuf.RpcController c,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest req) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse
 setBalancerRunning(com.google.protobuf.RpcController c,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest req) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse
 setNormalizerRunning(com.google.protobuf.RpcController controller,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse
 setQuota(com.google.protobuf.RpcController c,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest req) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse
 setSplitOrMergeEnabled(com.google.protobuf.RpcController controller,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest request) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse
 shutdown(com.google.protobuf.RpcController controller,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request) 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse
 snapshot(com.google.protobuf.RpcController controller,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request)
 Triggers an asynchronous attempt to take a snapshot.
 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse
 stopMaster(com.google.protobuf.RpcController controller,
 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request) 
 
-
+
 (package private) boolean
 switchBalancer(boolean b,
 MasterRpcServices.BalanceSwitchMode mode)
 Assigns balancer switch according to BalanceSwitchMode
 
 
-
+
 (package private) boolean
 synchronousBalanceSwitch(boolean b) 
 
-
+
 org.apache.hadoop.hbase.protobuf
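
MasterRpcServices is the master's internal protobuf endpoint; client code reaches the same operations (snapshot, balancer switch, and so on) through the public Admin API. A hedged sketch of those two calls, with illustrative names:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SnapshotAndBalancer {
  public static void main(String[] args) throws Exception {
    try (Connection conn =
             ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      // Takes a snapshot of the table and waits for it to complete.
      admin.snapshot("snap1", TableName.valueOf("mytable"));
      // setBalancerRunning returns the previous balancer state.
      boolean wasOn = admin.setBalancerRunning(false, true);
      System.out.println("Balancer was previously " + (wasOn ? "on" : "off"));
    }
  }
}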

[04/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
index 63c8d5a..58f8c31 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
@@ -137,51 +137,51 @@
 
 
 StoreFileReader
-BaseRegionObserver.postStoreFileReaderOpen(ObserverContext ctx,
+RegionObserver.postStoreFileReaderOpen(ObserverContext ctx,
   
org.apache.hadoop.fs.FileSystem fs,
   org.apache.hadoop.fs.Path p,
   FSDataInputStreamWrapper in,
   long size,
   CacheConfig cacheConf,
   Reference r,
-  StoreFileReader reader) 
+  StoreFileReader reader)
+Called after the creation of Reader for a store file.
+
 
 
 StoreFileReader
-RegionObserver.postStoreFileReaderOpen(ObserverContext ctx,
+BaseRegionObserver.postStoreFileReaderOpen(ObserverContext ctx,
   
org.apache.hadoop.fs.FileSystem fs,
   org.apache.hadoop.fs.Path p,
   FSDataInputStreamWrapper in,
   long size,
   CacheConfig cacheConf,
   Reference r,
-  StoreFileReader reader)
-Called after the creation of Reader for a store file.
-
+  StoreFileReader reader) 
 
 
 StoreFileReader
-BaseRegionObserver.preStoreFileReaderOpen(ObserverContext ctx,
+RegionObserver.preStoreFileReaderOpen(ObserverContext ctx,
 
org.apache.hadoop.fs.FileSystem fs,
 org.apache.hadoop.fs.Path p,
 FSDataInputStreamWrapper in,
 long size,
 CacheConfig cacheConf,
 Reference r,
-StoreFileReader reader) 
+StoreFileReader reader)
+Called before creation of Reader for a store file.
+
 
 
 StoreFileReader
-RegionObserver.preStoreFileReaderOpen(ObserverContext ctx,
+BaseRegionObserver.preStoreFileReaderOpen(ObserverContext ctx,
 
org.apache.hadoop.fs.FileSystem fs,
 org.apache.hadoop.fs.Path p,
 FSDataInputStreamWrapper in,
 long size,
 CacheConfig cacheConf,
 Reference r,
-StoreFileReader reader)
-Called before creation of Reader for a store file.
-
+StoreFileReader reader) 
 
 
 
@@ -232,25 +232,25 @@
 
 
 private CacheConfig
-HFileBlockIndex.BlockIndexWriter.cacheConf
-CacheConfig, or null if cache-on-write is disabled
+HFileReaderImpl.cacheConf
+Block cache configuration.
 
 
 
 protected CacheConfig
-HFile.WriterFactory.cacheConf 
+HFileWriterImpl.cacheConf
+Cache configuration for caching data on write.
+
 
 
 private CacheConfig
-HFileReaderImpl.cacheConf
-Block cache configuration.
+HFileBlockIndex.BlockIndexWriter.cacheConf
+CacheConfig, or null if cache-on-write is disabled
 
 
 
 protected CacheConfig
-HFileWriterImpl.cacheConf
-Cache configuration for caching data on write.
-
+HFile.WriterFactory.cacheConf 
 
 
 
@@ -534,15 +534,15 @@
 
 
 private CacheConfig
-SweepReducer.cacheConfig 
+SweepJob.cacheConfig 
 
 
 private CacheConfig
-MemStoreWrapper.cacheConfig 
+SweepReducer.cacheConfig 
 
 
 private CacheConfig
-SweepJob.cacheConfig 
+MemStoreWrapper.cacheConfig 
 
 
 
@@ -605,13 +605,13 @@
 
 
 CacheConfig
-Store.getCacheConfig()
-Used for tests.
-
+HStore.getCacheConfig() 
 
 
 CacheConfig
-HStore.getCacheConfig() 
+Store.getCacheConfig()
+Used for tests.
+
 
 
 CacheConfig
@@ -735,11
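
The postStoreFileReaderOpen/preStoreFileReaderOpen rows reshuffled above are coprocessor hooks. A minimal sketch of a RegionObserver overriding the post-open hook; the class name and no-op body are illustrative, the signature follows the table above.

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.StoreFileReader;

public class ReaderOpenObserver extends BaseRegionObserver {
  @Override
  public StoreFileReader postStoreFileReaderOpen(
      ObserverContext<RegionCoprocessorEnvironment> ctx, FileSystem fs,
      Path p, FSDataInputStreamWrapper in, long size, CacheConfig cacheConf,
      Reference r, StoreFileReader reader) throws IOException {
    // Called after the Reader for a store file is created; returning the
    // reader unchanged keeps the default behavior.
    return reader;
  }
}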

[33/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
index d037968..3df7b03 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
@@ -262,11 +262,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HDFSBlocksDistribution
-StoreFileInfo.getHDFSBlockDistribution() 
+StoreFile.getHDFSBlockDistribution() 
 
 
 HDFSBlocksDistribution
-StoreFile.getHDFSBlockDistribution() 
+StoreFileInfo.getHDFSBlockDistribution() 
 
 
 HDFSBlocksDistribution
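
As a reminder of what the getHDFSBlockDistribution() accessors above return, a small self-contained sketch of HDFSBlocksDistribution; the hostnames and weights are made up for illustration.

import org.apache.hadoop.hbase.HDFSBlocksDistribution;

public class LocalitySketch {
  public static void main(String[] args) {
    HDFSBlocksDistribution dist = new HDFSBlocksDistribution();
    // 100 units of block weight on rs1, 50 on rs2.
    dist.addHostsAndBlockWeight(new String[] {"rs1.example.com"}, 100L);
    dist.addHostsAndBlockWeight(new String[] {"rs2.example.com"}, 50L);
    // rs1 holds 100 of 150 total weight, so its locality index is ~0.67.
    System.out.println(dist.getBlockLocalityIndex("rs1.example.com"));
  }
}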



[42/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 73230ba..e434cd9 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -280,10 +280,10 @@
  Warnings
  Errors
 
-1731
+1732
 0
 0
-12421
+12423
 
 Files
 
@@ -1941,7 +1941,7 @@
 org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
 0
 0
-19
+15
 
 org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
 0
@@ -2651,7 +2651,7 @@
 org/apache/hadoop/hbase/master/MasterRpcServices.java
 0
 0
-13
+14
 
 org/apache/hadoop/hbase/master/MasterServices.java
 0
@@ -3326,7 +3326,7 @@
 org/apache/hadoop/hbase/protobuf/RequestConverter.java
 0
 0
-239
+240
 
 org/apache/hadoop/hbase/protobuf/ResponseConverter.java
 0
@@ -3961,7 +3961,7 @@
 org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
 0
 0
-2
+6
 
 org/apache/hadoop/hbase/regionserver/UnexpectedStateException.java
 0
@@ -4421,7 +4421,7 @@
 org/apache/hadoop/hbase/rest/RESTServer.java
 0
 0
-5
+4
 
 org/apache/hadoop/hbase/rest/RESTServlet.java
 0
@@ -5903,96 +5903,101 @@
 0
 4
 
-org/apache/hadoop/hbase/zookeeper/ZKClusterId.java
+org/apache/hadoop/hbase/zookeeper/SplitOrMergeTracker.java
 0
 0
 1
 
+org/apache/hadoop/hbase/zookeeper/ZKClusterId.java
+0
+0
+1
+
 org/apache/hadoop/hbase/zookeeper/ZKConfig.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/zookeeper/ZKServerTool.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/zookeeper/ZKUtil.java
 0
 0
 56
-
+
 org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/zookeeper/ZooKeeperNodeTracker.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/zookeeper/lock/ZKInterProcessLockBase.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/zookeeper/lock/ZKInterProcessReadWriteLock.java
 0
 0
 2
-
+
 org/apache/hadoop/metrics2/MetricHistogram.java
 0
 0
 1
-
+
 org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 0
 0
 1
-
+
 org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java
 0
 0
 1
-
+
 org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java
 0
 0
 14
-
+
 org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
 0
 0
 1
-
+
 org/apache/hadoop/metrics2/util/MetricQuantile.java
 0
 0
 2
-
+
 org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
 0
 0
 6
-
-test-classes/log4j.properties
+
+test-classes/ESAPI.properties
 0
 0
 1
-
+
 test-classes/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties
 0
 0
@@ -6000,72 +6005,72 @@
 
 Rules
 
-
+
 Category
 Rule
 Violations
 Severity
-
+
 annotation
 http://checkstyle.sourceforge.net/config_annotation.html#MissingDeprecated";>MissingDeprecated
 73
  Error
-
+
 blocks
 http://checkstyle.sourceforge.net/config_blocks.html#EmptyBlock";>EmptyBlock
 18
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_blocks.html#LeftCurly";>LeftCurly
 223
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_blocks.html#NeedBraces";>NeedBraces
 1724
  Error
-
+
 coding
 http://checkstyle.sourceforge.net/config_coding.html#EmptyStatement";>EmptyStatement
 7
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_coding.html#EqualsHashCode";>EqualsHashCode
 7
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_coding.html#InnerAssignment";>InnerAssignment
 61
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_coding.html#MissingSwitchDefault";>MissingSwitchDefault
 28
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_coding.html#NoFinalizer";>NoFinalizer
 1
  Error
-
+
 design
 http://checkstyle.sourceforge.net/config_design.html#FinalClass";>FinalClass
 46
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_design.html#HideUtilityClassConstructor";>HideUtilityClassConstructor
 87
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_design.html#InterfaceIsType";>InterfaceIsType
 5
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_design.html#VisibilityModifier";>VisibilityModifier
 
@@ -6073,32 +6078,32 @@
 protectedAllowed: "true"
 120
  Error
-
+
 imports
 http://checkstyle.sourceforge.net/config_imports.html#AvoidStarImport";>AvoidStarImport
 3
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_imports.html#ImportOrder";>ImportOrder
 
 ordered: "true"
 sortStaticImportsAlphabetically: "true"
-936
+933
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_imports.html#RedundantImport";>RedundantImport
 10
  Error
-
+
 
 http://checkstyle.sourceforge.net/config_imports.html#UnusedImports";>UnusedImports
 
 processJavadoc: "true"
 3
  Error
-
+
 indentation
 http://checkstyle.sourceforge.net/config_indentation.html#Indentation";>Indentation
 
@@ -6107,54 +6112,54 @@
 arrayInitIndent: "2"
 throwsIndent: "2"
 basicOffset: "2"
-4

[22/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/ZooKeeperKeepAliveConnection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/ZooKeeperKeepAliveConnection.html b/devapidocs/org/apache/hadoop/hbase/client/ZooKeeperKeepAliveConnection.html
index 17eb88c..4ffc9ce 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ZooKeeperKeepAliveConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ZooKeeperKeepAliveConnection.html
@@ -198,7 +198,7 @@ extends ZooKeeperWatcher
-abort,
 checkAndSetZNodeAcls,
 getBaseZNode,
 getConfiguration,
 getListeners,
 getMasterAddressZNode,
 getMetaReplicaIdFromZnode,
 getMetaReplicaNodes, getNumberOfListeners,
 getQuorum,
 getRecoverableZooKeeper,
 getRegionNormalizerZNode,
 getSwitchZNode,
 getZNodeForReplica,
 interruptedException,
 isAborted, isAnyMetaReplicaZnode,
 isClientReadable,
 isDefaultMetaReplicaZnode,
 isSuperUserId,
 keeperException,
 prefix,
 process,
 reconnectAfterExpiration,
 registerListener,
 registerListenerFirst,
 sync,
 toString,
 unregisterAllListeners,
 unregisterListener
+abort,
 checkAndSetZNodeAcls,
 getBaseZNode,
 getConfiguration,
 getListeners,
 getMasterAddressZNode,
 getMetaReplicaIdFromZnode,
 getMetaReplicaNodes, getNumberOfListeners,
 getQuorum,
 getRecoverableZooKeeper,
 getRegionNormalizerZNode,
 getSwitchLockZNode,
 getSwitchZNode,
 getZNodeForReplica,
 interruptedException,
 isAborted,
 isAnyMetaReplicaZnode,
 isClientReadable,
 isDefaultMetaReplicaZnode,
 isSuperUserId,
 keeperException,
 prefix,
 process,
 reconnectAfterExpiration,
 registerListener,
 registerListenerFirst,
 sync,
 toString,
 unregisterAllListeners, unregisterListener
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ServerStatistics.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ServerStatistics.html b/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ServerStatistics.html
index 17f60eb..2736f33 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ServerStatistics.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ServerStatistics.html
@@ -137,13 +137,13 @@
 
 
 long
-ClientBackoffPolicyFactory.NoBackoffPolicy.getBackoffTime(ServerName serverName,
+ClientBackoffPolicy.getBackoffTime(ServerName serverName,
 byte[] region,
 ServerStatistics stats) 
 
 
 long
-ClientBackoffPolicy.getBackoffTime(ServerName serverName,
+ClientBackoffPolicyFactory.NoBackoffPolicy.getBackoffTime(ServerName serverName,
 byte[] region,
 ServerStatistics stats) 
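
ClientBackoffPolicy.getBackoffTime, reordered above, is the single method a custom backoff policy implements. A minimal sketch; the flat 100 ms delay and class name are illustrative, while NoBackoffPolicy (above) simply returns 0.

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
import org.apache.hadoop.hbase.client.backoff.ServerStatistics;

public class FixedBackoffPolicy implements ClientBackoffPolicy {
  @Override
  public long getBackoffTime(ServerName serverName, byte[] region,
      ServerStatistics stats) {
    // Back off a flat 100 ms per call, regardless of server load.
    return 100L;
  }
}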
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.CompactType.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.CompactType.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.CompactType.html
index c272de6..0da980f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.CompactType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.CompactType.html
@@ -120,21 +120,21 @@ the order they are declared.
 
 
 void
-Admin.compact(TableName tableName,
+HBaseAdmin.compact(TableName tableName,
   Admin.CompactType compactType)
 Compact a table.
 
 
 
 void
-HBaseAdmin.compact(TableName tableName,
+Admin.compact(TableName tableName,
   Admin.CompactType compactType)
 Compact a table.
 
 
 
 void
-Admin.compact(TableName tableName,
+HBaseAdmin.compact(TableName tableName,
   byte[] columnFamily,
   Admin.CompactType compactType)
 Compact a column family within a table.
@@ -142,7 +142,7 @@ the order they are declared.
 
 
 void
-HBaseAdmin.compact(TableName tableName,
+Admin.compact(TableName tableName,
   byte[] columnFamily,
   Admin.CompactType compactType)
 Compact a column family within a table.
@@ -159,35 +159,35 @@ the order they are declared.
 
 
 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
-Admin.getCompactionState(TableName tableName,
+HBaseAdmin.getCompactionState(TableName tableName,
 Admin.CompactType compactType)
 Get the current compaction state of a table.
 
 
 
 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
-HBaseAdmin.getCompactionState(TableName tableName,
+Admin.getCompactionState(TableName tableName,
 Admin.CompactType compactType)
 Get the current compaction state of a table.
 
 
 
 voi

[15/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
index 9b9ec87..feed379 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
@@ -114,17 +114,33 @@
 
 
 void
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+
+
+void
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+
+
+void
 MasterObserver.postAbortProcedure(ObserverContext ctx)
Called after an abortProcedure request has been 
processed.
 
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
@@ -140,19 +156,15 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily)
-Deprecated. 
-
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily)
-Deprecated. 
-
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily) 
 
 
 void
@@ -164,15 +176,15 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily) 
+BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily) 
+BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily) 
 
 
 void
@@ -184,15 +196,19 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
-TableName tableName,
-HColumnDescriptor columnFamily) 
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
-BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
-TableName tableName,
-HColumnDescriptor columnFamily) 
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
@@ -208,19 +224,13 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
-TableName tableName,
-HColumnDescriptor columnFamily)
-Depre
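
The MasterObserver rows above are master-side coprocessor hooks. A minimal sketch overriding postAbortProcedure; the class name and println are illustrative only.

import java.io.IOException;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

public class AuditMasterObserver extends BaseMasterObserver {
  @Override
  public void postAbortProcedure(
      ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
    // Runs after an abortProcedure request has been processed.
    System.out.println("abortProcedure handled");
  }
}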

[03/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html
index 19962b5..7fe5bdb 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html
@@ -136,11 +136,11 @@
 
 
 HFileBlockIndex.BlockIndexReader
-HFile.Reader.getDataBlockIndexReader() 
+HFileReaderImpl.getDataBlockIndexReader() 
 
 
 HFileBlockIndex.BlockIndexReader
-HFileReaderImpl.getDataBlockIndexReader() 
+HFile.Reader.getDataBlockIndexReader() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index af76407..3887e33 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -164,11 +164,11 @@
 
 
 HFileContext
-HFileBlockDefaultEncodingContext.getHFileContext() 
+HFileBlockEncodingContext.getHFileContext() 
 
 
 HFileContext
-HFileBlockEncodingContext.getHFileContext() 
+HFileBlockDefaultEncodingContext.getHFileContext() 
 
 
 
@@ -181,22 +181,16 @@
 
 
 HFileBlockDecodingContext
-BufferedDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta) 
-
-
-HFileBlockDecodingContext
 DataBlockEncoder.newDataBlockDecodingContext(HFileContext meta)
 Creates an encoder specific decoding context, which will 
prepare the data
  before actual decoding
 
 
-
-HFileBlockEncodingContext
-BufferedDataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
-  byte[] header,
-  HFileContext meta) 
-
 
+HFileBlockDecodingContext
+BufferedDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta) 
+
+
 HFileBlockEncodingContext
 DataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
   byte[] headerBytes,
@@ -204,6 +198,12 @@
Creates an encoder specific encoding context
 
 
+
+HFileBlockEncodingContext
+BufferedDataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
+  byte[] header,
+  HFileContext meta) 
+
 
 
 
@@ -297,23 +297,23 @@
 
 
 HFileContext
-HFile.Writer.getFileContext()
-Return the file context for the HFile this writer belongs 
to
-
+HFileReaderImpl.getFileContext() 
 
 
 HFileContext
-HFile.Reader.getFileContext()
-Return the file context of the HFile this reader belongs 
to
-
+HFileWriterImpl.getFileContext() 
 
 
 HFileContext
-HFileReaderImpl.getFileContext() 
+HFile.Writer.getFileContext()
+Return the file context for the HFile this writer belongs 
to
+
 
 
 HFileContext
-HFileWriterImpl.getFileContext() 
+HFile.Reader.getFileContext()
+Return the file context of the HFile this reader belongs 
to
+
 
 
 (package private) HFileContext
@@ -343,35 +343,35 @@
 
 
 HFileBlockDecodingContext
-NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta) 
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext) 
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
-create an encoder specific decoding context for 
reading.
-
+NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta) 
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext) 
+HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
+create an encoder specific decoding context for 
reading.
+
 
 
 HFileBlockEncodingContext
+HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[] dummyHeader,
+  HFileContext fileContext) 
+
+
+HFileBlockEncodingContext
 NoOpDataBlockEncoder.newDataBlockEncodingContext(byte[] dummyHeader,
   HFileContext meta) 
 
-
+
 HFileBlockEncodingContext
 HFileDataBlockEncoder.newDataBlockEncodingContext(byte[] headerBytes,
   HFileContext fileContext)
 Create an encoder specific encoding context object for 
writing.
 
 
-
-HFileBlockEncodingContext
-HFileDataBlockEncoderImpl.newDataBlockEncodingContext(

[39/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 7cc82f5..8b0ba2e 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -299,6 +299,12 @@
 
 
 
+org.apache.hadoop.hbase.client.HBaseAdmin.addColumn(TableName,
 HColumnDescriptor)
+Since 2.0. Will be removed in 3.0. Use
+ HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
+
+
+
 org.apache.hadoop.hbase.client.Admin.addColumn(TableName,
 HColumnDescriptor)
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
@@ -306,12 +312,6 @@
  Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
 
 
-
-org.apache.hadoop.hbase.client.HBaseAdmin.addColumn(TableName,
 HColumnDescriptor)
-Since 2.0. Will be removed in 3.0. Use
- HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
-
-
 
 org.apache.hadoop.hbase.security.visibility.VisibilityClient.addLabel(Configuration,
 String)
 Use VisibilityClient.addLabel(Connection,String)
 instead.
@@ -393,10 +393,10 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.client.HConnection.deleteCachedRegionLocation(HRegionLocation)
@@ -404,6 +404,12 @@
 
 
 
+org.apache.hadoop.hbase.client.HBaseAdmin.deleteColumn(TableName,
 byte[])
+Since 2.0. Will be removed in 3.0. Use
+ HBaseAdmin.deleteColumnFamily(TableName,
 byte[]) instead.
+
+
+
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
@@ -411,12 +417,6 @@
  Use Admin.deleteColumnFamily(TableName,
 byte[])}.
 
 
-
-org.apache.hadoop.hbase.client.HBaseAdmin.deleteColumn(TableName,
 byte[])
-Since 2.0. Will be removed in 3.0. Use
- HBaseAdmin.deleteColumnFamily(TableName,
 byte[]) instead.
-
-
 
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValue(Cell)
 without any replacement.
@@ -426,13 +426,13 @@
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(List)
 
 
-org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, RegionServerServices, 
User)
+org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
 
 
 
-org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
+org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, RegionServerServices, 
User)
 
 
 
@@ -444,15 +444,15 @@
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(List)
 
 
-org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
- Instead use FilterBase.filterRowKey(Cell)
+ Instead use Filter.filterRowKey(Cell)
 
 
 
-org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
- Instead use Filter.filterRowKey(Cell)
+ Instead use FilterBase.filterRowKey(Cell)
 
 
 
@@ -539,56 +539,56 @@
 
 
 
-org.apache.hadoop.hbase.client.HConnection.getHTableDescriptor(byte[])
-internal method, do not use through HConnection
-
-
-
 org.apache.hadoop.hbase.client.ConnectionImplementation.getHTableDescriptor(byte[])
 Use Admin.getTableDescriptor(org.apache.hadoop.hbase.TableName)
   instead
 
 
-
-org.apache.hadoop.hbase.client.HConnection.getHTableDescriptor(TableName)
+
+org.apache.hadoop.hbase.client.HConnection.getHTableDescriptor(byte[])
 internal method, do not use through HConnection
 
 
-
+
 org.apache.hadoop.hbase.client.ConnectionImplementation.getHTableDescriptor(TableName)
 Use Admin.getTableDescriptor(org.apache.hadoop.hbase.TableName)
   instead
 
 
-
-org.apache.hadoop.hbase.client.HConnection.getHTableDescriptors(List)
-since 0.96.0
+
+org.apache.hadoop.hbase.client.HConnection.getHTableDescriptor(TableName)
+internal method, do no
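
For the addColumn/deleteColumn deprecations listed above, a small sketch of the replacement call; an open Admin handle is assumed, and the table and family names are illustrative.

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;

public class AddFamilySketch {
  static void addFamily(Admin admin) throws Exception {
    // addColumn(TableName, HColumnDescriptor) is deprecated per the list
    // above; addColumnFamily is the named replacement.
    admin.addColumnFamily(TableName.valueOf("t1"), new HColumnDescriptor("f2"));
  }
}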

[36/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 6e649c7..282d9bb 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -606,16 +606,6 @@ service.
 
 
 int
-CellComparator.compare(Cell a,
-  Cell b) 
-
-
-int
-CellComparator.RowComparator.compare(Cell a,
-  Cell b) 
-
-
-int
 KeyValue.MetaComparator.compare(Cell left,
   Cell right)
 Deprecated. 
@@ -640,6 +630,16 @@ service.
  
 
 
+int
+CellComparator.compare(Cell a,
+  Cell b) 
+
+
+int
+CellComparator.RowComparator.compare(Cell a,
+  Cell b) 
+
+
 private int
 CellComparator.compare(Cell a,
   Cell b,
@@ -815,37 +815,37 @@ service.
 
 
 int
+KeyValue.KVComparator.compareRows(Cell left,
+  Cell right)
+Deprecated. 
+ 
+
+
+int
 CellComparator.compareRows(Cell left,
   Cell right)
 Compares the rows of the left and right cell.
 
 
-
+
 int
 CellComparator.MetaCellComparator.compareRows(Cell left,
   Cell right) 
 
-
+
 int
-KeyValue.KVComparator.compareRows(Cell left,
-  Cell right)
+KeyValue.KVComparator.compareTimestamps(Cell left,
+  Cell right)
 Deprecated. 
  
 
-
+
 static int
 CellComparator.compareTimestamps(Cell left,
   Cell right)
 Compares cell's timestamps in DESCENDING order.
 
 
-
-int
-KeyValue.KVComparator.compareTimestamps(Cell left,
-  Cell right)
-Deprecated. 
- 
-
 
 static int
 CellComparator.compareValue(Cell cell,
@@ -1260,32 +1260,32 @@ service.
 
 
 
-boolean
-KeyValue.KVComparator.matchingRowColumn(Cell left,
+static boolean
+CellUtil.matchingRowColumn(Cell left,
   Cell right)
-Deprecated. 
 Compares the row and column of two keyvalues for 
equality
 
 
 
-static boolean
-CellUtil.matchingRowColumn(Cell left,
+boolean
+KeyValue.KVComparator.matchingRowColumn(Cell left,
   Cell right)
+Deprecated. 
 Compares the row and column of two keyvalues for 
equality
 
 
 
-boolean
-KeyValue.KVComparator.matchingRows(Cell left,
+static boolean
+CellUtil.matchingRows(Cell left,
 Cell right)
-Deprecated. 
 Compares the row of two keyvalues for equality
 
 
 
-static boolean
-CellUtil.matchingRows(Cell left,
+boolean
+KeyValue.KVComparator.matchingRows(Cell left,
 Cell right)
+Deprecated. 
 Compares the row of two keyvalues for equality
 
 
@@ -1651,17 +1651,17 @@ service.
 
 
 
-Put
-Put.add(Cell kv)
-Add the specified KeyValue to this Put operation.
-
-
-
 Increment
 Increment.add(Cell cell)
 Add the specified KeyValue to this operation.
 
 
+
+Put
+Put.add(Cell kv)
+Add the specified KeyValue to this Put operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1754,13 +1754,13 @@ service.
 Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Delete
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
+
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
@@ -1768,8 +1768,8 @@ service.
 
 
 
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
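
A quick, self-contained sketch of the CellUtil.matchingRows/matchingRowColumn helpers reordered above; the row, family, qualifier, and values are made up.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CellMatchSketch {
  public static void main(String[] args) {
    Cell a = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v1"));
    Cell b = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v2"));
    // Row and column match even though the values differ.
    System.out.println(CellUtil.matchingRowColumn(a, b)); // true
    System.out.println(CellUtil.matchingRows(a, b));      // true
  }
}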
 

[37/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
index e252b6c..97174a2 100644
--- a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
+++ b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
@@ -258,7 +258,7 @@ the order they are declared.
 
 
 values
-public static HealthChecker.HealthCheckerExitStatus[] values()
+public static HealthChecker.HealthCheckerExitStatus[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -275,7 +275,7 @@ for (HealthChecker.HealthCheckerExitStatus c : 
HealthChecker.HealthCheckerExitSt
 
 
 valueOf
-public static HealthChecker.HealthCheckerExitStatus valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static HealthChecker.HealthCheckerExitStatus valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
index 2208af2..8c07308 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
@@ -294,11 +294,11 @@
 
 
 private Abortable
-SimpleRpcScheduler.abortable 
+RpcExecutor.abortable 
 
 
 private Abortable
-RpcExecutor.abortable 
+SimpleRpcScheduler.abortable 
 
 
 
@@ -522,17 +522,17 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority,
-Abortable server)
-Constructs a RpcScheduler.
-
+Abortable server) 
 
 
 RpcScheduler
-SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority,
-Abortable server) 
+Abortable server)
+Constructs a RpcScheduler.
+
 
 
 
@@ -600,13 +600,13 @@
 
 
 
-protected Abortable
-ReplicationStateZKBase.abortable 
-
-
 private Abortable
 ReplicationPeersZKImpl.abortable 
 
+
+protected Abortable
+ReplicationStateZKBase.abortable 
+
 
 
 
@@ -821,19 +821,19 @@
 
 
 
+private Abortable
+ZKClusterId.abortable 
+
+
 protected Abortable
 ZooKeeperNodeTracker.abortable
 Used to abort if a fatal error occurs
 
 
-
+
 protected Abortable
 ZooKeeperWatcher.abortable 
 
-
-private Abortable
-ZKClusterId.abortable 
-
 
 private Abortable
 DrainingServerTracker.abortable 
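
Abortable, whose fields are reordered above, is the small callback interface those trackers and schedulers hold. A minimal sketch of an implementation; the logging is illustrative.

import org.apache.hadoop.hbase.Abortable;

public class LoggingAbortable implements Abortable {
  private volatile boolean aborted;

  @Override
  public void abort(String why, Throwable e) {
    // Components above call this on fatal errors.
    aborted = true;
    System.err.println("Aborting: " + why);
  }

  @Override
  public boolean isAborted() {
    return aborted;
  }
}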



[19/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
index 12fa41a..6b672ab 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
@@ -574,11 +574,11 @@ service.
 
 
 void
-HTable.validatePut(Put put) 
+BufferedMutatorImpl.validatePut(Put put) 
 
 
 void
-BufferedMutatorImpl.validatePut(Put put) 
+HTable.validatePut(Put put) 
 
 
 static void
@@ -687,107 +687,107 @@ service.
 
 
 boolean
-BaseRegionObserver.postCheckAndPut(ObserverContext e,
+RegionObserver.postCheckAndPut(ObserverContext c,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Put put,
-  boolean result) 
+  boolean result)
+Called after checkAndPut
+
 
 
 boolean
-RegionObserver.postCheckAndPut(ObserverContext c,
+BaseRegionObserver.postCheckAndPut(ObserverContext e,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Put put,
-  boolean result)
-Called after checkAndPut
-
+  boolean result) 
 
 
 void
-BaseRegionObserver.postPut(ObserverContext e,
+RegionObserver.postPut(ObserverContext c,
   Put put,
   WALEdit edit,
-  Durability durability) 
+  Durability durability)
+Called after the client stores a value.
+
 
 
 void
-RegionObserver.postPut(ObserverContext c,
+BaseRegionObserver.postPut(ObserverContext e,
   Put put,
   WALEdit edit,
-  Durability durability)
-Called after the client stores a value.
-
+  Durability durability) 
 
 
 boolean
-BaseRegionObserver.preCheckAndPut(ObserverContext e,
+RegionObserver.preCheckAndPut(ObserverContext c,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Put put,
-boolean result) 
+boolean result)
+Called before checkAndPut.
+
 
 
 boolean
-RegionObserver.preCheckAndPut(ObserverContext c,
+BaseRegionObserver.preCheckAndPut(ObserverContext e,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Put put,
-boolean result)
-Called before checkAndPut.
-
+boolean result) 
 
 
 boolean
-BaseRegionObserver.preCheckAndPutAfterRowLock(ObserverContext e,
+RegionObserver.preCheckAndPutAfterRowLock(ObserverContext c,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Put put,
-
boolean result) 
+boolean result)
+Called before checkAndPut but after acquiring rowlock.
+
 
 
 boolean
-RegionObserver.preCheckAndPutAfterRowLock(ObserverContext c,
+BaseRegionObserver.preCheckAndPutAfterRowLock(ObserverContext e,
 byte[] row,
 byte[] family,
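
The pre/postCheckAndPut hooks above wrap the client-side checkAndPut path. A short sketch of that client call; an open Table handle is assumed, and the row, family, and qualifier are illustrative.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndPutSketch {
  static boolean putIfAbsent(Table table) throws Exception {
    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // A null expected value means "apply only if f:q does not exist yet".
    return table.checkAndPut(Bytes.toBytes("row1"),
        Bytes.toBytes("f"), Bytes.toBytes("q"), null, put);
  }
}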

[21/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
index 25795cd..d11a3af 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
@@ -450,107 +450,107 @@ service.
 
 
 boolean
-BaseRegionObserver.postCheckAndDelete(ObserverContext e,
+RegionObserver.postCheckAndDelete(ObserverContext c,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Delete delete,
-boolean result) 
+boolean result)
+Called after checkAndDelete
+
 
 
 boolean
-RegionObserver.postCheckAndDelete(ObserverContext c,
+BaseRegionObserver.postCheckAndDelete(ObserverContext e,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Delete delete,
-boolean result)
-Called after checkAndDelete
-
+boolean result) 
 
 
 void
-BaseRegionObserver.postDelete(ObserverContext e,
+RegionObserver.postDelete(ObserverContext c,
 Delete delete,
 WALEdit edit,
-Durability durability) 
+Durability durability)
+Called after the client deletes a value.
+
 
 
 void
-RegionObserver.postDelete(ObserverContext c,
+BaseRegionObserver.postDelete(ObserverContext e,
 Delete delete,
 WALEdit edit,
-Durability durability)
-Called after the client deletes a value.
-
+Durability durability) 
 
 
 boolean
-BaseRegionObserver.preCheckAndDelete(ObserverContext e,
+RegionObserver.preCheckAndDelete(ObserverContext c,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Delete delete,
-  boolean result) 
+  boolean result)
+Called before checkAndDelete.
+
 
 
 boolean
-RegionObserver.preCheckAndDelete(ObserverContext c,
+BaseRegionObserver.preCheckAndDelete(ObserverContext e,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Delete delete,
-  boolean result)
-Called before checkAndDelete.
-
+  boolean result) 
 
 
 boolean
-BaseRegionObserver.preCheckAndDeleteAfterRowLock(ObserverContext e,
+RegionObserver.preCheckAndDeleteAfterRowLock(ObserverContext c,
   byte[] row,
   byte[] family,
   
byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Delete delete,
-  
boolean result) 
+  
boolean result)
+Called before checkAndDelete but after acquiring 
rowlock.
+
 
 
 boolean
-RegionObserver.preCheckAndDeleteAfterRowLock(ObserverContext c,
+BaseRegionObserver.preCheckAndDeleteAfterRowL

[16/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 32309b9..8c241d0 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -396,13 +396,13 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.client.Admin.CompactType
-org.apache.hadoop.hbase.client.Durability
-org.apache.hadoop.hbase.client.TableState.State
-org.apache.hadoop.hbase.client.Consistency
 org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.TableState.State
 org.apache.hadoop.hbase.client.AsyncProcess.Retry
 org.apache.hadoop.hbase.client.Admin.MasterSwitchType
+org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.Admin.CompactType
+org.apache.hadoop.hbase.client.Durability
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html b/devapidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
index 2c7a37b..f91b175 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
@@ -305,7 +305,7 @@ the order they are declared.
 
 
 values
-public static SecurityCapability[] values()
+public static SecurityCapability[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -322,7 +322,7 @@ for (SecurityCapability c : SecurityCapability.values())
 
 
 valueOf
-public static SecurityCapability valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static SecurityCapability valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html b/devapidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
index 2e5d672..0af7b8c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
@@ -102,13 +102,13 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-Admin.getSecurityCapabilities()
-Return the set of supported security capabilities.
-
+HBaseAdmin.getSecurityCapabilities() 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HBaseAdmin.getSecurityCapabilities() 
+Admin.getSecurityCapabilities()
+Return the set of supported security capabilities.
+
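
A one-method sketch of the getSecurityCapabilities() call reordered above; an open Admin handle is assumed.

import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.security.SecurityCapability;

public class CapabilitySketch {
  static void printCapabilities(Admin admin) throws Exception {
    // Returns the set of security capabilities the cluster supports.
    for (SecurityCapability c : admin.getSecurityCapabilities()) {
      System.out.println(c); // e.g. SECURE_AUTHENTICATION, CELL_VISIBILITY
    }
  }
}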
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
index ac6187a..d80cb3a 100644
--- a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
+++ b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
@@ -143,7 +143,7 @@
 
 
 Codec.Decoder
-CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
+CellC

[06/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
index 9626f4a..de71447 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
@@ -181,18 +181,18 @@
 
 
 HFileBlockEncodingContext
-BufferedDataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
-  byte[] header,
-  HFileContext meta) 
-
-
-HFileBlockEncodingContext
 DataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
   byte[] headerBytes,
   HFileContext meta)
Creates an encoder specific encoding context
 
 
+
+HFileBlockEncodingContext
+BufferedDataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
+  byte[] header,
+  HFileContext meta) 
+
 
 
 
@@ -204,25 +204,19 @@
 
 
 int
-BufferedDataBlockEncoder.encode(Cell cell,
-HFileBlockEncodingContext encodingCtx,
-http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
-
-
-int
 DataBlockEncoder.encode(Cell cell,
 HFileBlockEncodingContext encodingCtx,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out)
 Encodes a KeyValue.
 
 
-
-void
-BufferedDataBlockEncoder.endBlockEncoding(HFileBlockEncodingContext encodingCtx,
-http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out,
-
byte[] uncompressedBytesWithHeader) 
-
 
+int
+BufferedDataBlockEncoder.encode(Cell cell,
+HFileBlockEncodingContext encodingCtx,
+http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
+
+
 void
 DataBlockEncoder.endBlockEncoding(HFileBlockEncodingContext encodingCtx,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out,
@@ -230,18 +224,24 @@
 Ends encoding for a block of KeyValues.
 
 
-
+
 void
-BufferedDataBlockEncoder.startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx,
-http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
+BufferedDataBlockEncoder.endBlockEncoding(HFileBlockEncodingContext encodingCtx,
+http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out,
+
byte[] uncompressedBytesWithHeader) 
 
-
+
 void
 DataBlockEncoder.startBlockEncoding(HFileBlockEncodingContext encodingCtx,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out)
 Starts encoding for a block of KeyValues.
 
 
+
+void
+BufferedDataBlockEncoder.startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx,
+http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
+
 
 
 
@@ -271,21 +271,21 @@
 
 
 HFileBlockEncodingContext
+HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[] dummyHeader,
+  HFileContext fileContext) 
+
+
+HFileBlockEncodingContext
 NoOpDataBlockEncoder.newDataBlockEncodingContext(byte[] dummyHeader,
   HFileContext meta) 
 
-
+
 HFileBlockEncodingContext
 HFileDataBlockEncoder.newDataBlockEncodingContext(byte[] headerBytes,
   HFileContext fileContext)
 Create an encoder specific encoding context object for 
writing.
 
 
-
-HFileBlockEncodingContext
-HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[] dummyHeader,

[34/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index 9659e06..fa4e24e 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -459,38 +459,38 @@ service.
 
 
 void
-Admin.addColumn(TableName tableName,
+HBaseAdmin.addColumn(TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
-As of release 2.0.0.
- (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
- This will be removed in HBase 3.0.0.
- Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
+Since 2.0. Will be removed in 3.0. Use
+ HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
 
 
 
 
 void
-HBaseAdmin.addColumn(TableName tableName,
+Admin.addColumn(TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
-Since 2.0. Will be removed in 3.0. Use
- HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
+As of release 2.0.0.
+ (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
+ This will be removed in HBase 3.0.0.
+ Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
 
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
+HBaseAdmin.addColumnFamily(TableName tableName,
+  HColumnDescriptor columnFamily) 
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.addColumnFamily(TableName tableName,
   HColumnDescriptor columnFamily)
 Add a column family to an existing table.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-HBaseAdmin.addColumnFamily(TableName tableName,
-  HColumnDescriptor columnFamily) 
-
 
 UnmodifyableHTableDescriptor
 UnmodifyableHTableDescriptor.addFamily(HColumnDescriptor family)
@@ -499,38 +499,38 @@ service.
 
 
 void
-Admin.modifyColumn(TableName tableName,
+HBaseAdmin.modifyColumn(TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
-As of release 2.0.0.
- (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
- This will be removed in HBase 3.0.0.
- Use Admin.modifyColumnFamily(TableName,
 HColumnDescriptor).
+As of 2.0. Will be removed in 3.0. Use
+ HBaseAdmin.modifyColumnFamily(TableName,
 HColumnDescriptor) instead.
 
 
 
 
 void
-HBaseAdmin.modifyColumn(TableName tableName,
+Admin.modifyColumn(TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
-As of 2.0. Will be removed in 3.0. Use
- HBaseAdmin.modifyColumnFamily(TableName,
 HColumnDescriptor) instead.
+As of release 2.0.0.
+ (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
+ This will be removed in HBase 3.0.0.
+ Use Admin.modifyColumnFamily(TableName,
 HColumnDescriptor).
 
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
+HBaseAdmin.modifyColumnFamily(TableName tableName,
+HColumnDescriptor columnFamily) 
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.modifyColumnFamily(TableName tableName,
 HColumnDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">Future

[49/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index e032379..dfe0e2d 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -397,14 +397,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+Table.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
@@ -413,18 +413,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName[]
-Admin.listTableNames()
-List all of the names of userspace tables.
-
-
-
-TableName[]
 HConnection.listTableNames()
 Deprecated. 
 Use Admin.listTables()
 instead.
 
 
 
+
+TableName[]
+Admin.listTableNames()
+List all of the names of userspace tables.
+
+
 
 TableName[]
 Admin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
@@ -728,23 +728,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 boolean
-Admin.isTableAvailable(TableName tableName) 
-
-
-boolean
 HConnection.isTableAvailable(TableName tableName)
 Deprecated. 
  
 
-
+
 boolean
-Admin.isTableAvailable(TableName tableName,
-byte[][] splitKeys)
-Use this api to check if the table has been created with 
the specified number of splitkeys
- which was used while creating the given table.
-
+Admin.isTableAvailable(TableName tableName) 
 
-
+
 boolean
 HConnection.isTableAvailable(TableName tableName,
 byte[][] splitKeys)
@@ -753,21 +745,25 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-
+
 boolean
-Admin.isTableDisabled(TableName tableName) 
+Admin.isTableAvailable(TableName tableName,
+byte[][] splitKeys)
+Use this api to check if the table has been created with 
the specified number of splitkeys
+ which was used while creating the given table.
+
 
-
+
 boolean
 HConnection.isTableDisabled(TableName tableName)
 Deprecated. 
  
 
-
+
 boolean
-Admin.isTableEnabled(TableName tableName) 
+Admin.isTableDisabled(TableName tableName) 
 
-
+
 boolean
 HConnection.isTableEnabled(TableName tableName)
 Deprecated. 
@@ -775,6 +771,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  is possible.
 
 
+
+boolean
+Admin.isTableEnabled(TableName tableName) 
+
 
 HRegionLocation
 HConnection.locateRegion(TableName tableName,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/client/Admin.CompactType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.CompactType.html b/apidocs/org/apache/hadoop/hbase/client/Admin.CompactType.html
index fcaa208..d6537e0 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Admin.CompactType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Admin.CompactType.html
@@ -110,7 +110,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
-public static enum Admin.CompactType
+public static enum Admin.CompactType
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum
 Currently, there are only two compact types:
  NORMAL means do store files compaction;
@@ -201,7 +201,7 @@ the order they are declared.
 
 
 NORMAL
-public static final Admin.CompactType NORMAL
+public static final Admin.CompactType NORMAL
 
 
 
@@ -210,7 +210,7 @@ the order they are declared.
 
 
 MOB
-public static final Admin.CompactType MOB
+public static final Admin.CompactType MOB
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html b/apidocs/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html
index 6175ffa..00c3ef4 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html
@@ -110,7 +110,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public static enum Admin.MasterSwit

[40/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/coc.html
--
diff --git a/coc.html b/coc.html
index 1eeb2f4..d02abbc 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC 
may opt to skip early
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-14
+  Last Published: 
2016-04-18
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 6cc616e..042ec61 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -673,7 +673,7 @@ Now your HBase server is running, start 
coding and build that next
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-14
+  Last Published: 
2016-04-18
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index a924278..1b4dd73 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -518,7 +518,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-14
+  Last Published: 
2016-04-18
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 1e58413..f9cccb2 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Reactor Dependency Convergence
 
@@ -1702,7 +1702,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-14
+  Last Published: 
2016-04-18
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 48d..b68adf8 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Dependency Information
 
@@ -312,7 +312,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-14
+  Last Published: 
2016-04-18
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index d3d3d09..ffe3042 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependency Management
 
@@ -798,7 +798,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-14
+  Last Published: 
2016-04-18
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index 4942eb7..9842c6a 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -1713,6 +1713,9 @@
 ResourceBase
 ResourceConfig
 Response
+RestCsrfPreventionFilter
+RestCsrfPreventionFilter.HttpInteraction
+RestCsrfPreventionFilter.ServletFilterHttpInteraction
 RestoreSnapshotException
 RestoreSnapshotHelper
 RestoreSnapshotHelper.RestoreMetaChanges

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/allclass

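The three RestCsrfPreventionFilter entries above are new in this publish. A hedged sketch of turning the filter on programmatically; the property key is an assumption and should be verified against the REST gateway docs:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class RestCsrfConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Assumed property name; verify against the REST gateway documentation.
    conf.setBoolean("hbase.rest.csrf.enabled", true);
    System.out.println(conf.getBoolean("hbase.rest.csrf.enabled", false));
  }
}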
[09/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 856ab2e..5556625 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -174,19 +174,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Get
-Get.setFilter(Filter filter) 
-
-
 Scan
 Scan.setFilter(Filter filter) 
 
-
+
 Query
 Query.setFilter(Filter filter)
 Apply the specified server-side filter when performing the 
Query.
 
 
+
+Get
+Get.setFilter(Filter filter) 
+
 
 
 
@@ -413,16 +413,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-private Filter
-SkipFilter.filter 
+(package private) Filter
+FilterWrapper.filter 
 
 
 private Filter
 WhileMatchFilter.filter 
 
 
-(package private) Filter
-FilterWrapper.filter 
+private Filter
+SkipFilter.filter 
 
 
 private Filter
@@ -452,11 +452,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
@@ -464,13 +464,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnRangeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-FilterBase.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments)
-Given the filter's arguments it constructs the filter
-
+ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
@@ -478,63 +476,65 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class o

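The setFilter and createFilterFromArguments rows above are only reordered, not changed. For reference, a minimal sketch of attaching filters to a Scan and a Get; the row and qualifier bytes are placeholders:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterSketch {
  public static void main(String[] args) {
    Scan scan = new Scan();
    scan.setFilter(new PrefixFilter(Bytes.toBytes("user-"))); // rows starting with "user-"
    Get get = new Get(Bytes.toBytes("user-0001"));
    get.setFilter(new QualifierFilter(CompareOp.EQUAL,
        new BinaryComparator(Bytes.toBytes("email"))));
  }
}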
[14/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
index 05e06b1..85649eb 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
@@ -185,17 +185,33 @@
 
 
 void
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+
+
+void
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+
+
+void
 MasterObserver.postAbortProcedure(ObserverContext ctx)
Called after an abortProcedure request has been 
processed.
 
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
@@ -211,19 +227,15 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily)
-Deprecated. 
-
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily)
-Deprecated. 
-
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily) 
 
 
 void
@@ -235,15 +247,15 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily) 
+BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily) 
+BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily) 
 
 
 void
@@ -255,15 +267,19 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
-TableName tableName,
-HColumnDescriptor columnFamily) 
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
-BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
-TableName tableName,
-HColumnDescriptor columnFamily) 
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily)
+Deprecated. 
+
 
 
 void
@@ -279,19 +295,13 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
-TableName tableName,
-HColumnDescriptor columnFamily)
-Deprecated. 
-
+BaseMasterAndRegionObserver.postAddRSGroup(ObserverCont

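The hooks listed above are typically picked up by subclassing BaseMasterObserver and overriding only what is needed. A minimal sketch, assuming the usual ObserverContext<MasterCoprocessorEnvironment> signature that the extraction stripped from this page:

import java.io.IOException;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

public class LoggingMasterObserver extends BaseMasterObserver {
  @Override
  public void postAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx)
      throws IOException {
    // Runs after an abortProcedure request has been processed (see above).
    System.out.println("abortProcedure handled");
  }
}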
[23/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
index ef26087..3a6b27c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
@@ -2634,7 +2634,7 @@ public boolean 
 
 release
-private static void release(ConnectionImplementation.MasterServiceState mss)
+private static void release(ConnectionImplementation.MasterServiceState mss)
 
 
 
@@ -2643,7 +2643,7 @@ public boolean 
 
 isKeepAliveMasterConnectedAndRunning
-private boolean isKeepAliveMasterConnectedAndRunning(ConnectionImplementation.MasterServiceState mss)
+private boolean isKeepAliveMasterConnectedAndRunning(ConnectionImplementation.MasterServiceState mss)
 
 
 
@@ -2652,7 +2652,7 @@ public boolean 
 
 releaseMaster
-void releaseMaster(ConnectionImplementation.MasterServiceState mss)
+void releaseMaster(ConnectionImplementation.MasterServiceState mss)
 
 
 
@@ -2661,7 +2661,7 @@ public boolean 
 
 closeMasterService
-private void closeMasterService(ConnectionImplementation.MasterServiceState mss)
+private void closeMasterService(ConnectionImplementation.MasterServiceState mss)
 
 
 
@@ -2670,7 +2670,7 @@ public boolean 
 
 closeMaster
-private void closeMaster()
+private void closeMaster()
 Immediate close of the shared master. Can be by the delayed 
close or when closing the
  connection itself.
 
@@ -2681,7 +2681,7 @@ public boolean 
 
 updateCachedLocation
-void updateCachedLocation(HRegionInfo hri,
+void updateCachedLocation(HRegionInfo hri,
 ServerName source,
 ServerName serverName,
 long seqNum)
@@ -2693,7 +2693,7 @@ public boolean 
 
 deleteCachedRegionLocation
-public void deleteCachedRegionLocation(HRegionLocation location)
+public void deleteCachedRegionLocation(HRegionLocation location)
 Description copied from interface: ClusterConnection
 Deletes cached locations for the specific region.
 
@@ -2710,7 +2710,7 @@ public boolean 
 
 updateCachedLocations
-public void updateCachedLocations(TableName tableName,
+public void updateCachedLocations(TableName tableName,
  byte[] rowkey,
  http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object exception,
  HRegionLocation source)
@@ -2726,7 +2726,7 @@ public boolean 
 
 updateCachedLocations
-public void updateCachedLocations(TableName tableName,
+public void updateCachedLocations(TableName tableName,
  byte[] regionName,
  byte[] rowkey,
  http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object exception,
@@ -2749,7 +2749,7 @@ public boolean 
 
 updateCachedLocations
-public void updateCachedLocations(byte[] tableName,
+public void updateCachedLocations(byte[] tableName,
  byte[] rowkey,
  http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object exception,
  HRegionLocation source)
@@ -2766,7 +2766,7 @@ public boolean 
 processBatch
 http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
-public void processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+public void processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
TableName tableName,
http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
@@ -2795,7 +2795,7 @@ public void 
 processBatch
 http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
-public void processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+public void processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-externa

[10/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
index 4ab2ad2..cb92b06 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
@@ -241,147 +241,147 @@ service.
 
 
 boolean
-BaseRegionObserver.postCheckAndDelete(ObserverContext e,
+RegionObserver.postCheckAndDelete(ObserverContext c,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Delete delete,
-boolean result) 
+boolean result)
+Called after checkAndDelete
+
 
 
 boolean
-RegionObserver.postCheckAndDelete(ObserverContext c,
+BaseRegionObserver.postCheckAndDelete(ObserverContext e,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Delete delete,
-boolean result)
-Called after checkAndDelete
-
+boolean result) 
 
 
 boolean
-BaseRegionObserver.postCheckAndPut(ObserverContext e,
+RegionObserver.postCheckAndPut(ObserverContext c,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Put put,
-  boolean result) 
+  boolean result)
+Called after checkAndPut
+
 
 
 boolean
-RegionObserver.postCheckAndPut(ObserverContext c,
+BaseRegionObserver.postCheckAndPut(ObserverContext e,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Put put,
-  boolean result)
-Called after checkAndPut
-
+  boolean result) 
 
 
 boolean
-BaseRegionObserver.preCheckAndDelete(ObserverContext e,
+RegionObserver.preCheckAndDelete(ObserverContext c,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Delete delete,
-  boolean result) 
+  boolean result)
+Called before checkAndDelete.
+
 
 
 boolean
-RegionObserver.preCheckAndDelete(ObserverContext c,
+BaseRegionObserver.preCheckAndDelete(ObserverContext e,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Delete delete,
-  boolean result)
-Called before checkAndDelete.
-
+  boolean result) 
 
 
 boolean
-BaseRegionObserver.preCheckAndDeleteAfterRowLock(ObserverContext e,
+RegionObserver.preCheckAndDeleteAfterRowLock(ObserverContext c,
   byte[] row,
   byte[] family,
   
byte[] qualifier,
   C

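The pre/postCheckAndPut and pre/postCheckAndDelete hooks above fire around the client-side check-and-mutate calls. A minimal sketch of the client call that triggers them; connection details and names are placeholders:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndPutSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("t"))) { // placeholder table
      byte[] row = Bytes.toBytes("r1");
      byte[] cf = Bytes.toBytes("f");
      byte[] q = Bytes.toBytes("q");
      Put put = new Put(row).addColumn(cf, q, Bytes.toBytes("new"));
      // Applies the Put only if the current value equals "old" (server-side compare);
      // the coprocessor hooks above run before and after this check.
      boolean applied = table.checkAndPut(row, cf, q, CompareOp.EQUAL, Bytes.toBytes("old"), put);
      System.out.println("applied: " + applied);
    }
  }
}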
[46/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/rest/client/class-use/Response.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/rest/client/class-use/Response.html 
b/apidocs/org/apache/hadoop/hbase/rest/client/class-use/Response.html
index 6199b64..20943fb 100644
--- a/apidocs/org/apache/hadoop/hbase/rest/client/class-use/Response.html
+++ b/apidocs/org/apache/hadoop/hbase/rest/client/class-use/Response.html
@@ -103,10 +103,25 @@
 
 
 Response
+Client.delete(Cluster cluster,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
+org.apache.commons.httpclient.Header extraHdr)
+Send a DELETE request
+
+
+
+Response
 Client.delete(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path)
 Send a DELETE request
 
 
+
+Response
+Client.delete(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
+org.apache.commons.httpclient.Header extraHdr)
+Send a DELETE request
+
+
 
 Response
 Client.get(Cluster cluster,
@@ -184,13 +199,23 @@
 
 
 Response
+Client.post(Cluster cluster,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String contentType,
+byte[] content,
+org.apache.commons.httpclient.Header extraHdr)
+Send a POST request
+
+
+
+Response
 Client.post(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
 org.apache.commons.httpclient.Header[] headers,
 byte[] content)
 Send a POST request
 
 
-
+
 Response
 Client.post(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String contentType,
@@ -198,6 +223,15 @@
 Send a POST request
 
 
+
+Response
+Client.post(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String contentType,
+byte[] content,
+org.apache.commons.httpclient.Header extraHdr)
+Send a POST request
+
+
 
 Response
 Client.put(Cluster cluster,
@@ -218,13 +252,23 @@
 
 
 Response
+Client.put(Cluster cluster,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String contentType,
+  byte[] content,
+  org.apache.commons.httpclient.Header extraHdr)
+Send a PUT request
+
+
+
+Response
 Client.put(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
   org.apache.commons.httpclient.Header[] headers,
   byte[] content)
 Send a PUT request
 
 
-
+
 Response
 Client.put(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String contentType,
@@ -232,6 +276,15 @@
 Send a PUT request
 
 
+
+Response
+Client.put(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String path,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String contentType,
+  byte[] content,
+  org.apache.commons.httpclient.Header extraHdr)
+Send a PUT request
+
+
 
 
 

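The new overloads above add a single extra org.apache.commons.httpclient.Header to each REST verb, e.g. for auth tokens. A minimal sketch of the existing Client calls; host, port, and paths are placeholders:

import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.Response;

public class RestClientSketch {
  public static void main(String[] args) throws Exception {
    Cluster cluster = new Cluster();
    cluster.add("localhost", 8080); // placeholder REST gateway host:port
    Client client = new Client(cluster);
    Response version = client.get("/version/cluster");
    System.out.println(version.getCode());
    Response del = client.delete("/t/r1"); // placeholder table/row path
    System.out.println(del.getCode());
  }
}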
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
index 910b506..ccc396d 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
@@ -242,18 +242,18 @@
 
 
 ByteRange
-SimpleMutableByteRange.deepCopy() 
+ByteRange.deepCopy()
+Create a new ByteRange with

[51/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/9f9a078f
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/9f9a078f
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/9f9a078f

Branch: refs/heads/asf-site
Commit: 9f9a078f00d1ddf5303798e76cae9244b644f8a4
Parents: 3c6f352
Author: jenkins 
Authored: Mon Apr 18 15:26:02 2016 +
Committer: Misty Stanley-Jones 
Committed: Mon Apr 18 14:03:30 2016 -0700

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|   Bin 13383444 -> 13388981 
bytes
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/index-all.html  |37 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   242 +-
 .../hbase/class-use/HTableDescriptor.html   |12 +-
 .../hadoop/hbase/class-use/ServerName.html  | 4 +-
 .../hadoop/hbase/class-use/TableName.html   |54 +-
 .../hadoop/hbase/client/Admin.CompactType.html  | 6 +-
 .../hbase/client/Admin.MasterSwitchType.html|10 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |98 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 .../hadoop/hbase/client/IsolationLevel.html | 4 +-
 .../class-use/Admin.MasterSwitchType.html   | 3 +-
 .../hbase/client/class-use/Consistency.html |14 +-
 .../hbase/client/class-use/Durability.html  |16 +-
 .../hbase/client/class-use/IsolationLevel.html  |14 +-
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hadoop/hbase/client/class-use/Result.html   |46 +-
 .../hadoop/hbase/client/class-use/Row.html  | 4 +-
 .../hadoop/hbase/client/class-use/Scan.html | 4 +-
 .../hadoop/hbase/client/class-use/Table.html| 4 +-
 .../hadoop/hbase/client/package-tree.html   | 6 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../filter/class-use/Filter.ReturnCode.html |62 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |64 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../org/apache/hadoop/hbase/io/TimeRange.html   |50 +-
 .../io/class-use/ImmutableBytesWritable.html|66 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|32 +-
 .../hbase/io/crypto/class-use/Cipher.html   | 8 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../org/apache/hadoop/hbase/io/package-use.html | 5 +
 .../mapreduce/class-use/TableRecordReader.html  | 4 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hadoop/hbase/regionserver/BloomType.html| 4 +-
 .../apache/hadoop/hbase/rest/client/Client.html |   190 +-
 .../hbase/rest/client/class-use/Cluster.html|42 +-
 .../hbase/rest/client/class-use/Response.html   |57 +-
 .../hadoop/hbase/util/class-use/ByteRange.html  |   126 +-
 .../hadoop/hbase/util/class-use/Bytes.html  |16 +-
 .../hadoop/hbase/util/class-use/Order.html  |46 +-
 .../hadoop/hbase/util/class-use/Pair.html   | 4 +-
 .../util/class-use/PositionedByteRange.html |   274 +-
 apidocs/overview-tree.html  |12 +-
 .../hadoop/hbase/client/Admin.CompactType.html  |83 +-
 .../hbase/client/Admin.MasterSwitchType.html|83 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |83 +-
 .../org/apache/hadoop/hbase/io/TimeRange.html   |   313 +-
 .../hadoop/hbase/mapred/GroupingTableMap.html   | 2 +-
 .../hadoop/hbase/mapreduce/CellCounter.html | 2 +-
 .../hadoop/hbase/mapreduce/CopyTable.html   | 4 +-
 .../apache/hadoop/hbase/mapreduce/Export.html   | 6 +-
 .../hbase/mapreduce/GroupingTableMapper.html| 2 +-
 .../hadoop/hbase/mapreduce/RowCounter.html  | 4 +-
 .../mapreduce/SimpleTotalOrderPartitioner.html  | 2 +-
 .../hbase/mapreduce/TableInputFormat.html   | 4 +-
 .../apache/hadoop/hbase/rest/client/Client.html |   373 +-
 book.html   |32 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 16334 +
 checkstyle.rss  |   510 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 3 +
 devapidocs/allclasses-noframe.html  | 3 +
 de

[17/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
index 53dd4e9..64ca784 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
@@ -251,7 +251,7 @@
 
 
 int
-Get.compareTo(Row other) 
+RegionCoprocessorServiceExec.compareTo(Row o) 
 
 
 int
@@ -259,7 +259,7 @@
 
 
 int
-Mutation.compareTo(Row d) 
+Get.compareTo(Row other) 
 
 
 int
@@ -267,7 +267,7 @@
 
 
 int
-RegionCoprocessorServiceExec.compareTo(Row o) 
+Mutation.compareTo(Row d) 
 
 
 private boolean
@@ -477,23 +477,23 @@
 
 
 void
-HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
 byte[] tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-internal method, do not use through HConnection
+Unsupported API
 
 
 
 
 void
-ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
 byte[] tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-Unsupported API
+internal method, do not use through HConnection
 
 
 
@@ -507,29 +507,29 @@
 
 
 void
-HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
 TableName tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-since 0.96 - Use Table.batch(java.util.List, java.lang.Object[]) 
instead
+since 0.96 Use Table.batch(java.util.List, java.lang.Object[]) 
instead
 
 
 
 
 void
-ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
 TableName tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-since 0.96 Use Table.batch(java.util.List, java.lang.Object[]) 
instead
+since 0.96 - Use Table.batch(java.util.List, java.lang.Object[]) 
instead
 
 
 
 
  void
-HConnection.processBatchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+ConnectionImplementation.processBatchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List

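The deprecation notes above both point at Table.batch. A minimal sketch of the replacement call; table and cell names are placeholders:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("t"))) { // placeholder
      List<Row> actions = new ArrayList<>();
      actions.add(new Get(Bytes.toBytes("r1")));
      actions.add(new Put(Bytes.toBytes("r2"))
          .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")));
      Object[] results = new Object[actions.size()];
      table.batch(actions, results); // replaces the deprecated processBatch
      System.out.println(results[0]); // Result for the Get
    }
  }
}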
[28/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
index 34907e4..1526f62 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
@@ -172,43 +172,43 @@
 
 
 void
-ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore)
-Cancel any ongoing schedules that this chore has with the 
implementer of this interface.
-
+ChoreService.cancelChore(ScheduledChore chore) 
 
 
 void
-ChoreService.cancelChore(ScheduledChore chore) 
+ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore)
+Cancel any ongoing schedules that this chore has with the 
implementer of this interface.
+
 
 
 void
-ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore,
+ChoreService.cancelChore(ScheduledChore chore,
   boolean mayInterruptIfRunning) 
 
 
 void
-ChoreService.cancelChore(ScheduledChore chore,
+ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore,
   boolean mayInterruptIfRunning) 
 
 
 boolean
-ScheduledChore.ChoreServicer.isChoreScheduled(ScheduledChore chore) 
+ChoreService.isChoreScheduled(ScheduledChore chore) 
 
 
 boolean
-ChoreService.isChoreScheduled(ScheduledChore chore) 
+ScheduledChore.ChoreServicer.isChoreScheduled(ScheduledChore chore) 
 
 
 void
+ChoreService.onChoreMissedStartTime(ScheduledChore chore) 
+
+
+void
 ScheduledChore.ChoreServicer.onChoreMissedStartTime(ScheduledChore chore)
 A callback that tells the implementer of this interface 
that one of the scheduled chores is
  missing its start time.
 
 
-
-void
-ChoreService.onChoreMissedStartTime(ScheduledChore chore) 
-
 
 private void
 ChoreService.printChoreDetails(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String header,
@@ -226,13 +226,13 @@
 
 
 boolean
-ScheduledChore.ChoreServicer.triggerNow(ScheduledChore chore)
-This method tries to execute the chore immediately.
-
+ChoreService.triggerNow(ScheduledChore chore) 
 
 
 boolean
-ChoreService.triggerNow(ScheduledChore chore) 
+ScheduledChore.ChoreServicer.triggerNow(ScheduledChore chore)
+This method tries to execute the chore immediately.
+
 
 
 

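ChoreService and ScheduledChore.ChoreServicer above are internal (dev) APIs for periodic tasks. A minimal sketch of scheduling and cancelling a chore, assuming a stand-in Stoppable:

import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;

public class ChoreSketch {
  public static void main(String[] args) throws Exception {
    Stoppable stopper = new Stoppable() { // stand-in stopper
      private volatile boolean stopped;
      @Override public void stop(String why) { stopped = true; }
      @Override public boolean isStopped() { return stopped; }
    };
    ChoreService service = new ChoreService("demo");
    ScheduledChore chore = new ScheduledChore("heartbeat", stopper, 1000) {
      @Override protected void chore() { System.out.println("tick"); }
    };
    service.scheduleChore(chore); // the ChoreServicer callbacks above manage it from here
    Thread.sleep(3000);
    chore.cancel();
    service.shutdown();
  }
}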
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
index 60b8af5..c1f2efe 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
@@ -166,13 +166,13 @@
 
 
 
-private Server
-ZKSplitLogManagerCoordination.server 
-
-
 protected Server
 ZkCoordinatedStateManager.server 
 
+
+private Server
+ZKSplitLogManagerCoordination.server 
+
 
 
 
@@ -184,11 +184,11 @@
 
 
 Server
-ZkCoordinatedStateManager.getServer() 
+BaseCoordinatedStateManager.getServer() 
 
 
 Server
-BaseCoordinatedStateManager.getServer() 
+ZkCoordinatedStateManager.getServer() 
 
 
 
@@ -201,11 +201,11 @@
 
 
 void
-ZkCoordinatedStateManager.initialize(Server server) 
+BaseCoordinatedStateManager.initialize(Server server) 
 
 
 void
-BaseCoordinatedStateManager.initialize(Server server) 
+ZkCoordinatedStateManager.initialize(Server server) 
 
 
 
@@ -324,13 +324,13 @@
 
 
 
-private Server
-ActiveMasterManager.master 
-
-
 (package private) Server
 MasterFileSystem.master 
 
+
+private Server
+ActiveMasterManager.master 
+
 
 private Server
 ServerManager.master 
@@ -340,16 +340,16 @@
 RegionStateStore.server 
 
 
-private Server
-SplitLogManager.server 
+protected Server
+BulkAssigner.server 
 
 
 private Server
 CatalogJanitor.server 
 
 
-protected Server
-BulkAssigner.server 
+private Server
+SplitLogManager.server 
 
 
 
@@ -514,23 +514,23 @@
 
 
 private Server
-HeapMemoryManager.server 
+LogRoller.server 
 
 
 private Server
-RegionMergeTransactionImpl.server 
+SplitTransactionImpl.server 
 
 
 private Server
-SplitTransactionImpl.server 
+SplitTransactionImpl.DaughterOpener.server 
 
 
 private Server
-SplitTransactionImpl.DaughterOpener.server 
+HeapMemoryManager.server 
 
 
 private Server
-LogRoller.server 
+RegionMergeTransactionImpl.server 
 
 
 
@@ -543,11 +543,13 @@
 
 
 Server
-RegionMergeTransactionImpl.getServer() 
+SplitTransactionImpl.getServer() 
 
 
 Server
-SplitTransactionImpl.getServer() 
+SplitTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 Server
@@ -557,9 +559,7 @@
 
 
 Server
-SplitTransaction.getServer()
-Get the Server running the transaction or rollback
-
+RegionM

[26/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
index 0855b61..73ded02 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
@@ -147,19 +147,19 @@
 
 
 void
-ZkSplitLogWorkerCoordination.endTask(SplitLogTask slt,
+SplitLogWorkerCoordination.endTask(SplitLogTask slt,
   http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong ctr,
-  SplitLogWorkerCoordination.SplitTaskDetails details)
-endTask() can fail and the only way to recover out of it is 
for the
- SplitLogManager to timeout the 
task node.
+  SplitLogWorkerCoordination.SplitTaskDetails splitTaskDetails)
+Notify coordination engine that splitting task has 
completed.
 
 
 
 void
-SplitLogWorkerCoordination.endTask(SplitLogTask slt,
+ZkSplitLogWorkerCoordination.endTask(SplitLogTask slt,
   http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong ctr,
-  SplitLogWorkerCoordination.SplitTaskDetails splitTaskDetails)
-Notify coordination engine that splitting task has 
completed.
+  SplitLogWorkerCoordination.SplitTaskDetails details)
+endTask() can fail and the only way to recover out of it is 
for the
+ SplitLogManager to timeout the 
task node.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
index 615907d..294d686 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
@@ -118,11 +118,11 @@
 
 
 TableDescriptors
-MasterServices.getTableDescriptors() 
+HMaster.getTableDescriptors() 
 
 
 TableDescriptors
-HMaster.getTableDescriptors() 
+MasterServices.getTableDescriptors() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
index 967b9af..4f49372 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
@@ -98,41 +98,41 @@
 
 
 void
+HBaseAdmin.cloneSnapshot(byte[] snapshotName,
+  TableName tableName) 
+
+
+void
 Admin.cloneSnapshot(byte[] snapshotName,
   TableName tableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 void
-HBaseAdmin.cloneSnapshot(byte[] snapshotName,
+HBaseAdmin.cloneSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
   TableName tableName) 
 
-
+
 void
 Admin.cloneSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
   TableName tableName)
 Create a new table by cloning the snapshot content.
 
 
-
-void
-HBaseAdmin.cloneSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
-  TableName tableName) 
-
 
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
+HBaseAdmin.cloneSnapshotAsync(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
+TableName tableName) 
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.cloneSnapshot

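cloneSnapshot above is the call that can raise TableExistsException, the subject of this class-use page. A minimal sketch; snapshot and table names are placeholders:

import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class CloneSnapshotSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection();
         Admin admin = conn.getAdmin()) {
      try {
        // Creates a new table from snapshot content; fails if the target exists.
        admin.cloneSnapshot("nightly-snap", TableName.valueOf("restored")); // placeholders
      } catch (TableExistsException e) {
        System.err.println("target table already exists: " + e.getMessage());
      }
    }
  }
}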
[44/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index ae22f15..37be400 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -825,22 +825,22 @@
 
 org.apache.hadoop.hbase.util.Order
 org.apache.hadoop.hbase.KeepDeletedCells
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
+org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.regionserver.BloomType
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
 org.apache.hadoop.hbase.quotas.QuotaScope
 org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.client.Durability
+org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.IsolationLevel
 org.apache.hadoop.hbase.client.Admin.CompactType
 org.apache.hadoop.hbase.client.Admin.MasterSwitchType
-org.apache.hadoop.hbase.client.IsolationLevel
-org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.security.SecurityCapability
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html
index 31eac7c..be61fef 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html
@@ -1724,42 +1724,55 @@
 1716   *
 1717   * @param enabled enabled or not
 1718   * @param synchronous If true, it 
waits until current split() call, if outstanding, to return.
-1719   * @param switchTypes switchType list 
{@link MasterSwitchType}
-1720   * @return Previous switch value 
array
-1721   */
-1722  boolean[] setSplitOrMergeEnabled(final 
boolean enabled, final boolean synchronous,
-1723   final 
MasterSwitchType... switchTypes) throws IOException;
-1724
-1725  /**
-1726   * Query the current state of the 
switch
-1727   *
-1728   * @return true if the switch is 
enabled, false otherwise.
-1729   */
-1730  boolean isSplitOrMergeEnabled(final 
MasterSwitchType switchType) throws IOException;
-1731
-1732  /**
-1733   * Currently, there are only two 
compact types:
-1734   * {@code NORMAL} means do store files 
compaction;
-1735   * {@code MOB} means do mob files 
compaction.
-1736   * */
-1737  @InterfaceAudience.Public
-1738  @InterfaceStability.Unstable
-1739  public enum CompactType {
-1740
-1741NORMAL(0),
-1742MOB   (1);
-1743
-1744CompactType(int value) {}
-1745  }
-1746  
-1747  @InterfaceAudience.Public
-1748  @InterfaceStability.Evolving
-1749  public enum MasterSwitchType {
-1750SPLIT,
-1751MERGE
-1752  }
+1719   * @param skipLock if false, we will 
do lock before change switch.
+1720   * with the lock, 
other requests to change the switch will be rejected!
+1721   * And when you set it 
to be false,
+1722   * you should call 
{@link #releaseSplitOrMergeLockAndRollback()} by yourself
+1723   * @param switchTypes switchType list 
{@link MasterSwitchType}
+1724   * @return Previous switch value 
array
+1725   */
+1726  boolean[] setSplitOrMergeEnabled(final 
boolean enabled, final boolean synchronous,
+1727   final 
boolean skipLock,
+1728   final 
MasterSwitchType... switchTypes) throws IOException;
+1729
+1730  /**
+1731   * Query the current state of the 
switch
+1732   *
+1733   * @return true if the switch is 
enabled, false otherwise.
+1734   */
+1735  boolean isSplitOrMergeEnabled(final 
MasterSwitchType switchType) throws IOException;
+1736
+1737  /**
+1738   *  You should call this method after 
you call
+1739   *  {@link 
#setSplitOrMergeEnabled(boolean, boolean, boolean, MasterSwitchType...)}
+1740   *  with skipLock be false, this 
method will release the lock created by above method
+1741   *  and rollback the switch state to 
be original state before you change switch
+1742   * */
+1743  void 
releaseSplitOrMergeLockAndRollback() throws IOException;
+1744
+1745  /**
+1746   * Currently, there are only two 
compact types:
+1747   * {@code NORMAL} means do store files 
compaction;
+1748   * {@code MOB} means 

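The quoted source adds a skipLock parameter plus releaseSplitOrMergeLockAndRollback. A minimal sketch of the documented lock-then-rollback flow, using exactly the signatures quoted above; the maintenance work itself is elided:

import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Admin.MasterSwitchType;

public class SwitchLockSketch {
  // 'admin' is an already-open Admin handle; the maintenance work is elided.
  static void withSplitsAndMergesOff(Admin admin) throws Exception {
    // skipLock=false takes the lock, so other switch changes are rejected meanwhile.
    boolean[] previous = admin.setSplitOrMergeEnabled(false, true, false,
        MasterSwitchType.SPLIT, MasterSwitchType.MERGE);
    try {
      System.out.println("split was " + previous[0] + ", merge was " + previous[1]);
      // ... maintenance work ...
    } finally {
      // Per the quoted javadoc: release the lock and roll the switches back.
      admin.releaseSplitOrMergeLockAndRollback();
    }
  }
}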
[29/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 1689c47..38c22d9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
@@ -166,13 +166,13 @@
 
 
 
-protected InterProcessLock.MetadataHandler
-ZKInterProcessLockBase.handler 
-
-
 private InterProcessLock.MetadataHandler
 ZKInterProcessReadWriteLock.handler 
 
+
+protected InterProcessLock.MetadataHandler
+ZKInterProcessLockBase.handler 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
index 4f4468b..e9a6b35 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
@@ -159,14 +159,14 @@ the order they are declared.
 
 
 private KeepDeletedCells
-ScanInfo.keepDeletedCells 
-
-
-private KeepDeletedCells
 ScanQueryMatcher.keepDeletedCells
 whether to return deleted rows
 
 
+
+private KeepDeletedCells
+ScanInfo.keepDeletedCells 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index e034c65..92a396e 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
@@ -208,22 +208,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static KeyValue
-KeyValue.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
+KeyValueUtil.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
 
 
 static KeyValue
-KeyValueUtil.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
+KeyValue.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
 
 
 static KeyValue
-KeyValue.create(int length,
+KeyValueUtil.create(int length,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in)
 Create a KeyValue reading length from 
in
 
 
 
 static KeyValue
-KeyValueUtil.create(int length,
+KeyValue.create(int length,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in)
 Create a KeyValue reading length from 
in
 
@@ -339,31 +339,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(byte[] b) 
+KeyValueUtil.createKeyValueFromKey(byte[] b) 
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(byte[] b) 
+KeyValue.createKeyValueFromKey(byte[] b) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(byte[] b,
+KeyValueUtil.createKeyValueFromKey(byte[] b,
   int o,
   int l) 
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(byte[] b,
+KeyValue.createKeyValueFromKey(byte[] b,
   int o,
   int l) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
+KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
+KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
 
 
 static Key

[24/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
index 74fe4e6..65d17de 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
@@ -100,13 +100,13 @@
 
 
 void
-MasterServices.checkTableModifiable(TableName tableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableName tableName) 
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
+MasterServices.checkTableModifiable(TableName tableName)
+Check table is modifiable; i.e.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 3a9942d..f7e60ae 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -171,13 +171,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-MasterServices.checkTableModifiable(TableName tableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableName tableName) 
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
+MasterServices.checkTableModifiable(TableName tableName)
+Check table is modifiable; i.e.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
index 2e6d3dd..8a4b480 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
@@ -146,15 +146,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 KeyValue.EMPTY_ARRAY_LIST 
 
 
-private static http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-CellUtil.EMPTY_TAGS_ITR 
-
-
 (package private) static http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 TagUtil.EMPTY_TAGS_ITR
 Iterator returned when no Tags.
 
 
+
+private static http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
+CellUtil.EMPTY_TAGS_ITR 
+
 
 
 
@@ -767,17 +767,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-VisibilityLabelService.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
+DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
   
boolean withSerializationFormat,
-  boolean checkAuths)
-Creates tags corresponding to given visibility 
expression.
-
+  
boolean checkAuths) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
+VisibilityLabelService.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
   
boolean withSerializationFormat,
-  
boolean checkAuths) 
+  boolean checkAuths)
+Creates tags corresponding to given visibility 
expression.
+
 
 
 static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -823,6 +823,11 @@ Input/OutputFormats, a table i

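createVisibilityExpTags above is the server-side path that turns an expression into visibility Tags. The public client-side counterpart sets the expression on a mutation; a minimal sketch with placeholder labels:

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilitySketch {
  public static void main(String[] args) throws Exception {
    Put put = new Put(Bytes.toBytes("r1"));
    put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // Parsed server-side into visibility Tags like those handled above.
    put.setCellVisibility(new CellVisibility("secret|topsecret")); // placeholder labels
  }
}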
[07/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
[Truncated HTML diff of the generated Javadoc class-use pages
devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/DataBlockEncoder.EncodedSeeker.html
and devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/DataBlockEncoding.html:
the rows listing the createSeeker, getDataBlockEncoding,
newDataBlockEncodingContext and getEffectiveEncodingInCache
implementations were reordered; no API changes.]

[25/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
[Truncated HTML diff of the generated Javadoc class-use page
devapidocs/org/apache/hadoop/hbase/class-use/TableName.html: the rows
listing the tableName fields and the getName, getTable, getTableName
and listTableNames members were reordered; no API changes.]

[05/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
[Truncated HTML diff of the generated Javadoc page
devapidocs/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html:
only the source-line anchors on the class declaration and its private
members (LOG, options, verbose, printValue, printKey, shouldPrintMeta,
printBlockIndex, printBlockHeaders, printStats, checkRow, checkFamily,
isSeekToRow, checkMobIntegrity, mobFileLocations,
FOUND_MOB_FILES_CACHE_CAPACITY, MISSING_MOB_FILES_CACHE_CAPACITY, row,
files, count) changed; the visible documentation is unchanged.]

hbase git commit: HBASE-15649 Ignore buck-out when building docker image

2016-04-18 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 d5e5a38bc -> bcb7b1a29


HBASE-15649 Ignore buck-out when building docker image


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bcb7b1a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bcb7b1a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bcb7b1a2

Branch: refs/heads/HBASE-14850
Commit: bcb7b1a29c6d35de3576d6bf73848b4f110153ff
Parents: d5e5a38
Author: Elliott Clark 
Authored: Wed Apr 13 17:02:38 2016 -0700
Committer: Elliott Clark 
Committed: Mon Apr 18 14:20:01 2016 -0700

--
 hbase-native-client/.dockerignore | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bcb7b1a2/hbase-native-client/.dockerignore
--
diff --git a/hbase-native-client/.dockerignore 
b/hbase-native-client/.dockerignore
new file mode 100644
index 000..2cfffb7
--- /dev/null
+++ b/hbase-native-client/.dockerignore
@@ -0,0 +1,2 @@
+buck-out
+.buckd



hbase git commit: HBASE-15045 Keep hbase-native-client/if and hbase-protocol in sync.

2016-04-18 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 bcb7b1a29 -> b8919a4f6


HBASE-15045 Keep hbase-native-client/if and hbase-protocol in sync.

Summary: On every start of docker make sure that protos are up to date.

Test Plan: Added the script and it updated the protos, but didn't delete BUCK.

Differential Revision: https://reviews.facebook.net/D56763


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b8919a4f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b8919a4f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b8919a4f

Branch: refs/heads/HBASE-14850
Commit: b8919a4f63a67cb317c605c7b1133edbadaef7e9
Parents: bcb7b1a
Author: Elliott Clark 
Authored: Wed Apr 13 15:59:13 2016 -0700
Committer: Elliott Clark 
Committed: Mon Apr 18 14:21:00 2016 -0700

--
 hbase-native-client/bin/copy-protobuf.sh |   8 ++
 hbase-native-client/bin/start-docker.sh  |  22 +++-
 hbase-native-client/if/Admin.proto   |   1 +
 hbase-native-client/if/Client.proto  |   8 +-
 hbase-native-client/if/ClusterStatus.proto   |   3 +
 hbase-native-client/if/Filter.proto  |   1 +
 hbase-native-client/if/Master.proto  |  47 +++-
 hbase-native-client/if/MasterProcedure.proto |  40 +++
 hbase-native-client/if/RSGroup.proto |  34 ++
 hbase-native-client/if/RSGroupAdmin.proto| 136 ++
 hbase-native-client/if/ZooKeeper.proto   |  13 +++
 11 files changed, 304 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b8919a4f/hbase-native-client/bin/copy-protobuf.sh
--
diff --git a/hbase-native-client/bin/copy-protobuf.sh 
b/hbase-native-client/bin/copy-protobuf.sh
new file mode 100755
index 000..c9e70f2
--- /dev/null
+++ b/hbase-native-client/bin/copy-protobuf.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+set -euo pipefail
+IFS=$'\n\t'
+
+BIN_DIR=$(dirname "$0")
+PB_SOURCE_DIR="${BIN_DIR}/../../hbase-protocol/src/main/protobuf/"
+PB_DEST_DIR="${BIN_DIR}/../if/"
+rsync -r --delete --exclude BUCK ${PB_SOURCE_DIR} ${PB_DEST_DIR}

http://git-wip-us.apache.org/repos/asf/hbase/blob/b8919a4f/hbase-native-client/bin/start-docker.sh
--
diff --git a/hbase-native-client/bin/start-docker.sh 
b/hbase-native-client/bin/start-docker.sh
index 725ed6a..9cbd8b7 100755
--- a/hbase-native-client/bin/start-docker.sh
+++ b/hbase-native-client/bin/start-docker.sh
@@ -23,22 +23,38 @@ set -x
 eval "$(docker-machine env docker-vm)"
 eval "$(docker-machine env dinghy)"
 
-# Build the image
-docker build -t hbase_native .
+BIN_DIR=$(pushd `dirname "$0"` 2>&1 > /dev/null && pwd && popd 2>&1 > /dev/null)
+BASE_DIR=$(pushd "${BIN_DIR}/../" 2>&1 > /dev/null && pwd && popd 2>&1 > /dev/null)
 
+${BIN_DIR}/copy-protobuf.sh
 
+# Go into the base dir. This just makes things cleaner.
+pushd ${BASE_DIR}
+
+# Make sure that there is a third-party dir.
 mkdir third-party || true
+
+# Get gtest
+# TODO(eclark): Remove this ( see HBASE-15427 )
 if [[ ! -d third-party/googletest ]]; then
    git clone https://github.com/google/googletest.git third-party/googletest
 fi
 
+# We don't want to have to re-download all the jars in docker if we can help it
 if [[ ! -d ~/.m2 ]]; then
 echo "~/.m2 directory doesn't exist. Check Apache Maven is installed."
 exit 1
 fi;
 
+# Build the image
+# 
+# This shouldn't be needed after the development environment is a little more stable.
+docker build -t hbase_native .
+
+# After the image is built run the thing
 docker run -p 16010:16010/tcp \
-e "JAVA_HOME=/usr/lib/jvm/java-8-oracle" \
-   -v ${PWD}/..:/usr/src/hbase \
+   -v ${BASE_DIR}/..:/usr/src/hbase \
-v ~/.m2:/root/.m2 \
-it hbase_native  /bin/bash
+popd

http://git-wip-us.apache.org/repos/asf/hbase/blob/b8919a4f/hbase-native-client/if/Admin.proto
--
diff --git a/hbase-native-client/if/Admin.proto 
b/hbase-native-client/if/Admin.proto
index e905340..a1905a4 100644
--- a/hbase-native-client/if/Admin.proto
+++ b/hbase-native-client/if/Admin.proto
@@ -25,6 +25,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 option optimize_for = SPEED;
 
+import "Client.proto";
 import "HBase.proto";
 import "WAL.proto";
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b8919a4f/hbase-native-client/if/Client.proto
--
diff --git a/hbase-native-client/if/Client.proto 
b/hbase-native-client/if/Client.proto
index ca9abdc..8a4d459 100644
--- a/hbase-native-client/if/Client.proto
+++ b/hbase-native-client/i

hbase git commit: Correct Bloom filter documentation in the book (yi liang)

2016-04-18 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/master 70687c18b -> 31b85e73d


Correct Bloom filter documentation in the book (yi liang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/31b85e73
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/31b85e73
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/31b85e73

Branch: refs/heads/master
Commit: 31b85e73da8bee80e8aa918de09fd85d1d7a724a
Parents: 70687c1
Author: Jerry He 
Authored: Mon Apr 18 09:57:46 2016 -0700
Committer: Jerry He 
Committed: Mon Apr 18 09:57:46 2016 -0700

--
 src/main/asciidoc/_chapters/performance.adoc | 22 +++---
 1 file changed, 11 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/31b85e73/src/main/asciidoc/_chapters/performance.adoc
--
diff --git a/src/main/asciidoc/_chapters/performance.adoc 
b/src/main/asciidoc/_chapters/performance.adoc
index 66dd489..01956d5 100644
--- a/src/main/asciidoc/_chapters/performance.adoc
+++ b/src/main/asciidoc/_chapters/performance.adoc
@@ -361,7 +361,7 @@ Bloom filters need to be rebuilt upon deletion, so may not be appropriate in env
 
 Bloom filters are enabled on a Column Family.
 You can do this by using the setBloomFilterType method of HColumnDescriptor or 
using the HBase API.
-Valid values are `NONE` (the default), `ROW`, or `ROWCOL`.
+Valid values are `NONE`, `ROW` (default), or `ROWCOL`.
 See <> for more information on `ROW` versus `ROWCOL`.
 See also the API documentation for 
link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html[HColumnDescriptor].
 
@@ -382,17 +382,17 @@ You can configure the following settings in the _hbase-site.xml_.
 | Default
 | Description
 
-| io.hfile.bloom.enabled
+| io.storefile.bloom.enabled
 | yes
 | Set to no to kill bloom filters server-wide if something goes wrong
 
-| io.hfile.bloom.error.rate
+| io.storefile.bloom.error.rate
 | .01
 | The average false positive rate for bloom filters. Folding is used to
  maintain the false positive rate. Expressed as a decimal representation of a
  percentage.
 
-| io.hfile.bloom.max.fold
+| io.storefile.bloom.max.fold
 | 7
 | The guaranteed maximum fold rate. Changing this setting should not be
   necessary and is not recommended.
@@ -406,7 +406,7 @@ You can configure the following settings in the _hbase-site.xml_.
 | Master switch to enable Delete Family Bloom filters and store them in the StoreFile.
 
 | io.storefile.bloom.block.size
-| 65536
+| 131072
 | Target Bloom block size. Bloom filter blocks of approximately this size
   are interleaved with data blocks.
 
@@ -713,20 +713,20 @@ Stored in the LRU cache, if it is enabled (It's enabled by default).
 [[config.bloom]]
  Bloom Filter Configuration
 
-= `io.hfile.bloom.enabled` global kill switch
+= `io.storefile.bloom.enabled` global kill switch
 
-`io.hfile.bloom.enabled` in `Configuration` serves as the kill switch in case something goes wrong.
+`io.storefile.bloom.enabled` in `Configuration` serves as the kill switch in case something goes wrong.
 Default = `true`.
 
-= `io.hfile.bloom.error.rate`
+= `io.storefile.bloom.error.rate`
 
-`io.hfile.bloom.error.rate` = average false positive rate.
+`io.storefile.bloom.error.rate` = average false positive rate.
 Default = 1%. Decrease rate by ½ (e.g.
 to .5%) == +1 bit per bloom entry.
 
-= `io.hfile.bloom.max.fold`
+= `io.storefile.bloom.max.fold`
 
-`io.hfile.bloom.max.fold` = guaranteed minimum fold rate.
+`io.storefile.bloom.max.fold` = guaranteed minimum fold rate.
 Most people should leave this alone.
 Default = 7, or can collapse to at least 1/128th of original size.
See the _Development Process_ section of the document link:https://issues.apache.org/jira/secure/attachment/12444007/Bloom_Filters_in_HBase.pdf[BloomFilters in HBase] for more on what this option means.
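
[Editor's note] For readers applying the corrected guidance, here is a
minimal, self-contained sketch of both knobs this patch documents:
choosing a Bloom filter type per column family through the Java client
API, and the renamed server-wide io.storefile.bloom.* keys. The table
name, family name, and error rate below are illustrative placeholders,
and in practice the server-wide keys belong in hbase-site.xml rather
than in client code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.regionserver.BloomType;

public class BloomFilterExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Server-wide kill switch and false positive rate use the
    // io.storefile.* names, per the correction above. Placeholder
    // values; set these in hbase-site.xml on a real cluster.
    conf.setBoolean("io.storefile.bloom.enabled", true);
    conf.setFloat("io.storefile.bloom.error.rate", 0.005f);

    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // ROW is the default Bloom filter type; ROWCOL also folds the
      // column qualifier into the Bloom key. "t1" and "cf" are made up.
      HTableDescriptor table = new HTableDescriptor(TableName.valueOf("t1"));
      HColumnDescriptor family = new HColumnDescriptor("cf");
      family.setBloomFilterType(BloomType.ROWCOL);
      table.addFamily(family);
      admin.createTable(table);
    }
  }
}

ROWCOL filters are larger than ROW filters, so they generally only pay
off when reads are dominated by point lookups on specific row+column
pairs.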



hbase git commit: HBASE-13372 Add unit tests for SplitTransaction and RegionMergeTransaction listeners

2016-04-18 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 6d40b7a0e -> 2b5da6f7a


HBASE-13372 Add unit tests for SplitTransaction and RegionMergeTransaction listeners

Signed-off-by: Andrew Purtell 
Amending-Author: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2b5da6f7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2b5da6f7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2b5da6f7

Branch: refs/heads/branch-1
Commit: 2b5da6f7a044165801e01356c89df706106958cd
Parents: 6d40b7a
Author: Gábor Lipták 
Authored: Sat Aug 22 19:39:13 2015 -0400
Committer: Andrew Purtell 
Committed: Mon Apr 18 16:56:59 2016 -0700

--
 .../TestRegionMergeTransaction.java | 36 +---
 .../regionserver/TestSplitTransaction.java  | 28 ---
 2 files changed, 55 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2b5da6f7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
index 49f3dce..3ed839b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
@@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.*;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -210,6 +209,35 @@ public class TestRegionMergeTransaction {
 assertFalse(spyMT.prepare(null));
   }
 
+  /**
+   * Test RegionMergeTransactionListener
+   */
+  @Test public void testRegionMergeTransactionListener() throws Exception {
+RegionMergeTransactionImpl mt = new RegionMergeTransactionImpl(region_a, region_b,
+false);
+RegionMergeTransactionImpl spyMT = Mockito.spy(mt);
+doReturn(false).when(spyMT).hasMergeQualifierInMeta(null,
+region_a.getRegionInfo().getRegionName());
+doReturn(false).when(spyMT).hasMergeQualifierInMeta(null,
+region_b.getRegionInfo().getRegionName());
+RegionMergeTransaction.TransactionListener listener =
+Mockito.mock(RegionMergeTransaction.TransactionListener.class);
+mt.registerTransactionListener(listener);
+mt.prepare(null);
+TEST_UTIL.getConfiguration().setInt(HConstants.REGIONSERVER_PORT, 0);
+CoordinatedStateManager cp = CoordinatedStateManagerFactory.getCoordinatedStateManager(
+  TEST_UTIL.getConfiguration());
+Server mockServer = new HRegionServer(TEST_UTIL.getConfiguration(), cp);
+mt.execute(mockServer, null);
+verify(listener).transition(mt,
+RegionMergeTransaction.RegionMergeTransactionPhase.STARTED,
+RegionMergeTransaction.RegionMergeTransactionPhase.PREPARED);
+verify(listener, times(10)).transition(any(RegionMergeTransaction.class),
+any(RegionMergeTransaction.RegionMergeTransactionPhase.class),
+any(RegionMergeTransaction.RegionMergeTransactionPhase.class));
+verifyNoMoreInteractions(listener);
+  }
+
   @Test
   public void testWholesomeMerge() throws IOException, InterruptedException {
 final int rowCountOfRegionA = loadRegion(this.region_a, CF, true);
@@ -355,9 +383,9 @@ public class TestRegionMergeTransaction {
   }
 
   @Test
-  public void testMeregedRegionBoundary() {
+  public void testMergedRegionBoundary() {
 TableName tableName =
-TableName.valueOf("testMeregedRegionBoundary");
+TableName.valueOf("testMergedRegionBoundary");
 byte[] a = Bytes.toBytes("a");
 byte[] b = Bytes.toBytes("b");
 byte[] z = Bytes.toBytes("z");

http://git-wip-us.apache.org/repos/asf/hbase/blob/2b5da6f7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
index d5c9575..b548b65 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
@@ -23,10 +23,9 @@ import static org
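
[Editor's note] As a concrete illustration of what the mocked listener
in this test stands in for, here is a minimal logging implementation
sketch. The transition signature mirrors the verify() calls in the diff
above; the rollback callback and the exact parameter types are
assumptions about the rest of the
RegionMergeTransaction.TransactionListener interface, not a verbatim
copy of it.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.regionserver.RegionMergeTransaction;

// Logs every phase change of a region merge. Register it with
// mt.registerTransactionListener(new LoggingMergeListener()), just as
// the test registers its Mockito mock.
public class LoggingMergeListener
    implements RegionMergeTransaction.TransactionListener {
  private static final Log LOG = LogFactory.getLog(LoggingMergeListener.class);

  @Override
  public void transition(RegionMergeTransaction transaction,
      RegionMergeTransaction.RegionMergeTransactionPhase from,
      RegionMergeTransaction.RegionMergeTransactionPhase to) {
    LOG.info("Merge transition: " + from + " -> " + to);
  }

  @Override
  public void rollback(RegionMergeTransaction transaction,
      RegionMergeTransaction.RegionMergeTransactionPhase from,
      RegionMergeTransaction.RegionMergeTransactionPhase to) {
    // Called if the transaction unwinds; assumed from the listener contract.
    LOG.warn("Merge rollback: " + from + " -> " + to);
  }
}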

hbase git commit: HBASE-13372 Add unit tests for SplitTransaction and RegionMergeTransaction listeners

2016-04-18 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/master 31b85e73d -> 6d7a7fa3a


HBASE-13372 Add unit tests for SplitTransaction and RegionMergeTransaction listeners

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6d7a7fa3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6d7a7fa3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6d7a7fa3

Branch: refs/heads/master
Commit: 6d7a7fa3aa5a9dc720d4f1238ebf38c53e4875b9
Parents: 31b85e7
Author: Gábor Lipták 
Authored: Sat Aug 22 19:39:13 2015 -0400
Committer: Andrew Purtell 
Committed: Mon Apr 18 17:04:56 2016 -0700

--
 .../TestRegionMergeTransaction.java | 36 +---
 .../regionserver/TestSplitTransaction.java  | 28 ---
 2 files changed, 55 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6d7a7fa3/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
index bcb8733..4630522 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
@@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.*;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -211,6 +210,35 @@ public class TestRegionMergeTransaction {
 assertFalse(spyMT.prepare(null));
   }
 
+  /**
+   * Test RegionMergeTransactionListener
+   */
+  @Test public void testRegionMergeTransactionListener() throws IOException {
+RegionMergeTransactionImpl mt = new RegionMergeTransactionImpl(region_a, region_b,
+false);
+RegionMergeTransactionImpl spyMT = Mockito.spy(mt);
+doReturn(false).when(spyMT).hasMergeQualifierInMeta(null,
+region_a.getRegionInfo().getRegionName());
+doReturn(false).when(spyMT).hasMergeQualifierInMeta(null,
+region_b.getRegionInfo().getRegionName());
+RegionMergeTransaction.TransactionListener listener =
+Mockito.mock(RegionMergeTransaction.TransactionListener.class);
+mt.registerTransactionListener(listener);
+mt.prepare(null);
+TEST_UTIL.getConfiguration().setInt(HConstants.REGIONSERVER_PORT, 0);
+CoordinatedStateManager cp = CoordinatedStateManagerFactory.getCoordinatedStateManager(
+  TEST_UTIL.getConfiguration());
+Server mockServer = new HRegionServer(TEST_UTIL.getConfiguration(), cp);
+mt.execute(mockServer, null);
+verify(listener).transition(mt,
+RegionMergeTransaction.RegionMergeTransactionPhase.STARTED,
+RegionMergeTransaction.RegionMergeTransactionPhase.PREPARED);
+verify(listener, times(10)).transition(any(RegionMergeTransaction.class),
+any(RegionMergeTransaction.RegionMergeTransactionPhase.class),
+any(RegionMergeTransaction.RegionMergeTransactionPhase.class));
+verifyNoMoreInteractions(listener);
+  }
+
   @Test
   public void testWholesomeMerge() throws IOException, InterruptedException {
 final int rowCountOfRegionA = loadRegion(this.region_a, CF, true);
@@ -356,9 +384,9 @@ public class TestRegionMergeTransaction {
   }
 
   @Test
-  public void testMeregedRegionBoundary() {
+  public void testMergedRegionBoundary() {
 TableName tableName =
-TableName.valueOf("testMeregedRegionBoundary");
+TableName.valueOf("testMergedRegionBoundary");
 byte[] a = Bytes.toBytes("a");
 byte[] b = Bytes.toBytes("b");
 byte[] z = Bytes.toBytes("z");

http://git-wip-us.apache.org/repos/asf/hbase/blob/6d7a7fa3/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
index 26b3293..2ade27a 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
@@ -23,10 +23,9 @@ import static org.junit.Assert.assertFalse;
 import