hbase git commit: HBASE-18579 Enable core dump by default for docker -revert

2017-08-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 6e0f6df79 -> d4bd3c71e


HBASE-18579 Enable core dump by default for docker -revert


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d4bd3c71
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d4bd3c71
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d4bd3c71

Branch: refs/heads/HBASE-14850
Commit: d4bd3c71e12d86edab412da1389a3455dba5533e
Parents: 6e0f6df
Author: tedyu 
Authored: Mon Aug 14 16:15:22 2017 -0700
Committer: tedyu 
Committed: Mon Aug 14 16:15:22 2017 -0700

--
 hbase-native-client/bin/start-docker.sh | 2 +-
 hbase-native-client/docker-files/Dockerfile | 4 +---
 2 files changed, 2 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d4bd3c71/hbase-native-client/bin/start-docker.sh
--
diff --git a/hbase-native-client/bin/start-docker.sh 
b/hbase-native-client/bin/start-docker.sh
index 391238d..53325c1 100755
--- a/hbase-native-client/bin/start-docker.sh
+++ b/hbase-native-client/bin/start-docker.sh
@@ -55,7 +55,7 @@ fi;
 docker build -t hbase_native -f docker-files/Dockerfile .
 
 # After the image is built run the thing
-docker run --privileged=true -h="securecluster" -p 16050:16050/tcp \
+docker run -h="securecluster" -p 16050:16050/tcp \
  -v ${BASE_DIR}/..:/usr/src/hbase \
-v ~/.m2:/root/.m2 \
  -it hbase_native /bin/bash

http://git-wip-us.apache.org/repos/asf/hbase/blob/d4bd3c71/hbase-native-client/docker-files/Dockerfile
--
diff --git a/hbase-native-client/docker-files/Dockerfile 
b/hbase-native-client/docker-files/Dockerfile
index ac3cb47..efd9a9d 100644
--- a/hbase-native-client/docker-files/Dockerfile
+++ b/hbase-native-client/docker-files/Dockerfile
@@ -117,8 +117,6 @@ RUN cd /usr/src/ && \
   ctest && \
   make install
 
-RUN echo "enabled=1" >> /etc/default/apport
-
-ENTRYPOINT /usr/sbin/krb5kdc -P /var/run/krb5kdc.pid && echo 
"/tmp/core.%h.%e.%t" >> /proc/sys/kernel/core_pattern && sysctl -p && ulimit -c 
unlimited && /bin/bash
+ENTRYPOINT /usr/sbin/krb5kdc -P /var/run/krb5kdc.pid && /bin/bash
 
 WORKDIR /usr/src/hbase/hbase-native-client



hbase git commit: HBASE-18533 Expose BucketCache values to be configured

2017-08-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 c6f57e0f3 -> 81f5da7af


HBASE-18533 Expose BucketCache values to be configured

Before this commit, BucketCache always used the default values.
This commit adds the ability to configure these values.

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/81f5da7a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/81f5da7a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/81f5da7a

Branch: refs/heads/branch-1.4
Commit: 81f5da7af8525c34018df345eb8936b1b9851cc3
Parents: c6f57e0
Author: Zach York 
Authored: Wed Aug 2 14:43:03 2017 -0700
Committer: tedyu 
Committed: Mon Aug 14 13:29:27 2017 -0700

--
 .../hadoop/hbase/io/hfile/CacheConfig.java  |   2 +-
 .../hbase/io/hfile/bucket/BucketCache.java  | 126 ++-
 .../hbase/io/hfile/bucket/TestBucketCache.java  | 112 +
 .../io/hfile/bucket/TestBucketWriterThread.java |   3 +-
 4 files changed, 213 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/81f5da7a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 57d2057..1d68b99 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -638,7 +638,7 @@ public class CacheConfig {
   // Bucket cache logs its stats on creation internal to the constructor.
   bucketCache = new BucketCache(bucketCacheIOEngineName,
 bucketCacheSize, blockSize, bucketSizes, writerThreads, 
writerQueueLen, persistentPath,
-ioErrorsTolerationDuration);
+ioErrorsTolerationDuration, c);
 } catch (IOException ioex) {
   LOG.error("Can't instantiate bucket cache", ioex); throw new 
RuntimeException(ioex);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/81f5da7a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 7aabb5c..0c8e62b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -51,8 +51,11 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -96,14 +99,23 @@ import 
com.google.common.util.concurrent.ThreadFactoryBuilder;
 public class BucketCache implements BlockCache, HeapSize {
   private static final Log LOG = LogFactory.getLog(BucketCache.class);
 
+  /** Priority buckets config */
+  static final String SINGLE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.single.factor";
+  static final String MULTI_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.multi.factor";
+  static final String MEMORY_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.memory.factor";
+  static final String EXTRA_FREE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.extrafreefactor";
+  static final String ACCEPT_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.acceptfactor";
+  static final String MIN_FACTOR_CONFIG_NAME = "hbase.bucketcache.minfactor";
+
   /** Priority buckets */
-  private static final float DEFAULT_SINGLE_FACTOR = 0.25f;
-  private static final float DEFAULT_MULTI_FACTOR = 0.50f;
-  private static final float DEFAULT_MEMORY_FACTOR = 0.25f;
-  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
+  @VisibleForTesting
+  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
+  static final float DEFAULT_MULTI_FACTOR = 0.50f;
+  static final float DEFAULT_MEMORY_FACTOR = 0.25f;
+  static final float DEFAULT_MIN_FACTOR = 0.85f;
 
+  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
   private static final float DEFAULT_ACCEPT_FACTOR = 0.95f;
-  private static final float DEFAULT_MIN_FACTOR = 0.85f;
 
   // Number of blocks to clear for each of the bucket size t
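
Editor's note, a minimal usage sketch (not part of the patch): the six keys added above are ordinary HBase configuration properties, and CacheConfig now hands its Configuration (the new "c" argument in the hunk above) to the BucketCache constructor, so values set this way take effect. The numbers below are illustrative; the defaults per the constants above are 0.25/0.50/0.25 (single/multi/memory), 0.10 (extra free), 0.95 (accept) and 0.85 (min).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class BucketCacheFactorTuning {
  /** Build a Configuration that overrides the newly exposed BucketCache factors. */
  public static Configuration tuned() {
    Configuration conf = HBaseConfiguration.create();
    conf.setFloat("hbase.bucketcache.single.factor", 0.30f);   // default 0.25
    conf.setFloat("hbase.bucketcache.multi.factor", 0.50f);    // default 0.50
    conf.setFloat("hbase.bucketcache.memory.factor", 0.20f);   // default 0.25
    conf.setFloat("hbase.bucketcache.extrafreefactor", 0.10f); // default 0.10
    conf.setFloat("hbase.bucketcache.acceptfactor", 0.95f);    // default 0.95
    conf.setFloat("hbase.bucketcache.minfactor", 0.85f);       // default 0.85
    return conf;
  }
}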

hbase git commit: HBASE-18533 Expose BucketCache values to be configured

2017-08-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 9078a034c -> d6a781cf0


HBASE-18533 Expose BucketCache values to be configured

Before this commit, BucketCache always used the default values.
This commit adds the ability to configure these values.

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d6a781cf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d6a781cf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d6a781cf

Branch: refs/heads/branch-1
Commit: d6a781cf0823b5051d929455218d6a4720150394
Parents: 9078a03
Author: Zach York 
Authored: Wed Aug 2 14:43:03 2017 -0700
Committer: tedyu 
Committed: Mon Aug 14 13:28:42 2017 -0700

--
 .../hadoop/hbase/io/hfile/CacheConfig.java  |   2 +-
 .../hbase/io/hfile/bucket/BucketCache.java  | 126 ++-
 .../hbase/io/hfile/bucket/TestBucketCache.java  | 112 +
 .../io/hfile/bucket/TestBucketWriterThread.java |   3 +-
 4 files changed, 213 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d6a781cf/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 57d2057..1d68b99 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -638,7 +638,7 @@ public class CacheConfig {
   // Bucket cache logs its stats on creation internal to the constructor.
   bucketCache = new BucketCache(bucketCacheIOEngineName,
 bucketCacheSize, blockSize, bucketSizes, writerThreads, 
writerQueueLen, persistentPath,
-ioErrorsTolerationDuration);
+ioErrorsTolerationDuration, c);
 } catch (IOException ioex) {
   LOG.error("Can't instantiate bucket cache", ioex); throw new 
RuntimeException(ioex);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6a781cf/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 7aabb5c..0c8e62b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -51,8 +51,11 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -96,14 +99,23 @@ import 
com.google.common.util.concurrent.ThreadFactoryBuilder;
 public class BucketCache implements BlockCache, HeapSize {
   private static final Log LOG = LogFactory.getLog(BucketCache.class);
 
+  /** Priority buckets config */
+  static final String SINGLE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.single.factor";
+  static final String MULTI_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.multi.factor";
+  static final String MEMORY_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.memory.factor";
+  static final String EXTRA_FREE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.extrafreefactor";
+  static final String ACCEPT_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.acceptfactor";
+  static final String MIN_FACTOR_CONFIG_NAME = "hbase.bucketcache.minfactor";
+
   /** Priority buckets */
-  private static final float DEFAULT_SINGLE_FACTOR = 0.25f;
-  private static final float DEFAULT_MULTI_FACTOR = 0.50f;
-  private static final float DEFAULT_MEMORY_FACTOR = 0.25f;
-  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
+  @VisibleForTesting
+  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
+  static final float DEFAULT_MULTI_FACTOR = 0.50f;
+  static final float DEFAULT_MEMORY_FACTOR = 0.25f;
+  static final float DEFAULT_MIN_FACTOR = 0.85f;
 
+  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
   private static final float DEFAULT_ACCEPT_FACTOR = 0.95f;
-  private static final float DEFAULT_MIN_FACTOR = 0.85f;
 
   // Number of blocks to clear for each of the bucket size that 

hbase git commit: HBASE-18533 Expose BucketCache values to be configured

2017-08-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 0ded122b1 -> 26bbc8ad6


HBASE-18533 Expose BucketCache values to be configured

Before this commit, BucketCache always used the default values.
This commit adds the ability to configure these values.

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/26bbc8ad
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/26bbc8ad
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/26bbc8ad

Branch: refs/heads/branch-2
Commit: 26bbc8ad6c683a9965f61e2bce0e7caf5fd2a37e
Parents: 0ded122
Author: Zach York 
Authored: Wed Aug 2 14:43:03 2017 -0700
Committer: tedyu 
Committed: Mon Aug 14 13:27:56 2017 -0700

--
 .../hadoop/hbase/io/hfile/CacheConfig.java  |   2 +-
 .../hbase/io/hfile/bucket/BucketCache.java  | 126 ++-
 .../hbase/io/hfile/bucket/TestBucketCache.java  | 114 -
 .../io/hfile/bucket/TestBucketWriterThread.java |   3 +-
 4 files changed, 214 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/26bbc8ad/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 140009b..13f048e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -671,7 +671,7 @@ public class CacheConfig {
   // Bucket cache logs its stats on creation internal to the constructor.
   bucketCache = new BucketCache(bucketCacheIOEngineName,
 bucketCacheSize, blockSize, bucketSizes, writerThreads, 
writerQueueLen, persistentPath,
-ioErrorsTolerationDuration);
+ioErrorsTolerationDuration, c);
 } catch (IOException ioex) {
   LOG.error("Can't instantiate bucket cache", ioex); throw new 
RuntimeException(ioex);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/26bbc8ad/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 1084399..79b1f4d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -52,8 +52,11 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -100,14 +103,23 @@ import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
 public class BucketCache implements BlockCache, HeapSize {
   private static final Log LOG = LogFactory.getLog(BucketCache.class);
 
+  /** Priority buckets config */
+  static final String SINGLE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.single.factor";
+  static final String MULTI_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.multi.factor";
+  static final String MEMORY_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.memory.factor";
+  static final String EXTRA_FREE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.extrafreefactor";
+  static final String ACCEPT_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.acceptfactor";
+  static final String MIN_FACTOR_CONFIG_NAME = "hbase.bucketcache.minfactor";
+
   /** Priority buckets */
-  private static final float DEFAULT_SINGLE_FACTOR = 0.25f;
-  private static final float DEFAULT_MULTI_FACTOR = 0.50f;
-  private static final float DEFAULT_MEMORY_FACTOR = 0.25f;
-  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
+  @VisibleForTesting
+  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
+  static final float DEFAULT_MULTI_FACTOR = 0.50f;
+  static final float DEFAULT_MEMORY_FACTOR = 0.25f;
+  static final float DEFAULT_MIN_FACTOR = 0.85f;
 
+  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
   private static final float DEFAULT_ACCEPT_FACTOR = 0.95f;
-  private static final float DEFAULT_MIN_FACTOR = 0.85f;
 
   // Number of blocks to clear for each of t

hbase git commit: HBASE-18533 Expose BucketCache values to be configured

2017-08-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 0b26ccdaa -> 0e32869f0


HBASE-18533 Expose BucketCache values to be configured

Before this commit, BucketCache always used the default values.
This commit adds the ability to configure these values.

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0e32869f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0e32869f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0e32869f

Branch: refs/heads/master
Commit: 0e32869f01697abf29292aa786d0cdcca10213c6
Parents: 0b26ccd
Author: Zach York 
Authored: Wed Aug 2 14:43:03 2017 -0700
Committer: tedyu 
Committed: Mon Aug 14 13:27:26 2017 -0700

--
 .../hadoop/hbase/io/hfile/CacheConfig.java  |   2 +-
 .../hbase/io/hfile/bucket/BucketCache.java  | 126 ++-
 .../hbase/io/hfile/bucket/TestBucketCache.java  | 114 -
 .../io/hfile/bucket/TestBucketWriterThread.java |   3 +-
 4 files changed, 214 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0e32869f/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 140009b..13f048e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -671,7 +671,7 @@ public class CacheConfig {
   // Bucket cache logs its stats on creation internal to the constructor.
   bucketCache = new BucketCache(bucketCacheIOEngineName,
 bucketCacheSize, blockSize, bucketSizes, writerThreads, 
writerQueueLen, persistentPath,
-ioErrorsTolerationDuration);
+ioErrorsTolerationDuration, c);
 } catch (IOException ioex) {
   LOG.error("Can't instantiate bucket cache", ioex); throw new 
RuntimeException(ioex);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0e32869f/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 1084399..79b1f4d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -52,8 +52,11 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -100,14 +103,23 @@ import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
 public class BucketCache implements BlockCache, HeapSize {
   private static final Log LOG = LogFactory.getLog(BucketCache.class);
 
+  /** Priority buckets config */
+  static final String SINGLE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.single.factor";
+  static final String MULTI_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.multi.factor";
+  static final String MEMORY_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.memory.factor";
+  static final String EXTRA_FREE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.extrafreefactor";
+  static final String ACCEPT_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.acceptfactor";
+  static final String MIN_FACTOR_CONFIG_NAME = "hbase.bucketcache.minfactor";
+
   /** Priority buckets */
-  private static final float DEFAULT_SINGLE_FACTOR = 0.25f;
-  private static final float DEFAULT_MULTI_FACTOR = 0.50f;
-  private static final float DEFAULT_MEMORY_FACTOR = 0.25f;
-  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
+  @VisibleForTesting
+  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
+  static final float DEFAULT_MULTI_FACTOR = 0.50f;
+  static final float DEFAULT_MEMORY_FACTOR = 0.25f;
+  static final float DEFAULT_MIN_FACTOR = 0.85f;
 
+  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
   private static final float DEFAULT_ACCEPT_FACTOR = 0.95f;
-  private static final float DEFAULT_MIN_FACTOR = 0.85f;
 
   // Number of blocks to clear for each of the b
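
Editor's note: on the read side, a constructor that receives a Configuration would typically resolve each factor against its default and validate it. The sketch below illustrates that pattern only; it is not the exact BucketCache code, and the sum check is an assumption suggested by the newly imported com.google.common.base.Preconditions.

import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;

public class FactorResolutionSketch {
  static float[] resolvePriorityFactors(Configuration conf) {
    // Defaults mirror DEFAULT_SINGLE/MULTI/MEMORY_FACTOR shown in the hunk above.
    float single = conf.getFloat("hbase.bucketcache.single.factor", 0.25f);
    float multi = conf.getFloat("hbase.bucketcache.multi.factor", 0.50f);
    float memory = conf.getFloat("hbase.bucketcache.memory.factor", 0.25f);
    // Assumed sanity check: the three priority buckets should partition the whole cache.
    Preconditions.checkArgument(single + multi + memory == 1.0f,
        "single + multi + memory factors must add up to 1.0");
    return new float[] { single, multi, memory };
  }
}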

hbase git commit: HBASE-18510 Update clock on replaying recovered edits (Sai Teja Ranuva)

2017-08-14 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14070.HLC 93c0d71c4 -> 58a51bcb7


HBASE-18510 Update clock on replaying recovered edits (Sai Teja Ranuva)

- Added clock updating to HRegion#replayRecoveredEdits
- Added TestHRegion#testHybridLogicalClockUpdatesOnRecoveryEditReplay and
  TestHRegion#testSystemMonotonicClockUpdatesOnRecoveryEditReplay
- Added comments clarifying expected logical and physical time in tests
- Changed TimestampType#getLogicalTime to be public

Change-Id: Ieba31b70fa2bdd3fa8080b521958039cb288badc


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/58a51bcb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/58a51bcb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/58a51bcb

Branch: refs/heads/HBASE-14070.HLC
Commit: 58a51bcb72250c3173d418a6d3fffb0c1f0d446c
Parents: 93c0d71
Author: Amit Patel 
Authored: Thu Jul 27 11:45:13 2017 -0700
Committer: Apekshit Sharma 
Committed: Mon Aug 14 12:40:43 2017 -0700

--
 .../org/apache/hadoop/hbase/TimestampType.java  |  10 +-
 .../hadoop/hbase/regionserver/HRegion.java  |   1 +
 .../hadoop/hbase/regionserver/TestHRegion.java  | 108 +++
 3 files changed, 116 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/58a51bcb/hbase-common/src/main/java/org/apache/hadoop/hbase/TimestampType.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/TimestampType.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/TimestampType.java
index 2f0a229..effbde2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TimestampType.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TimestampType.java
@@ -16,6 +16,7 @@
 
 package org.apache.hadoop.hbase;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.commons.lang.time.FastDateFormat;
@@ -105,7 +106,8 @@ public enum TimestampType {
   return timestamp >>> BITS_FOR_LOGICAL_TIME; // assume unsigned timestamp
 }
 
-long getLogicalTime(long timestamp) {
+@VisibleForTesting
+public long getLogicalTime(long timestamp) {
   return timestamp & LOGICAL_TIME_MAX_VALUE;
 }
 
@@ -200,7 +202,8 @@ public enum TimestampType {
   return timestamp;
 }
 
-long getLogicalTime(long timestamp) {
+@VisibleForTesting
+public long getLogicalTime(long timestamp) {
   return 0;
 }
 
@@ -281,7 +284,8 @@ public enum TimestampType {
* @param timestamp {@link #HYBRID} or {@link #PHYSICAL} Timestamp
* @return logical time
*/
-  abstract long getLogicalTime(long timestamp);
+  @VisibleForTesting
+  abstract public long getLogicalTime(long timestamp);
 
   /**
* @return the maximum possible physical time in {@link 
TimeUnit#MILLISECONDS}

http://git-wip-us.apache.org/repos/asf/hbase/blob/58a51bcb/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 2b82e4c..56f8bb6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -4434,6 +4434,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   continue;
 }
 CellUtil.setSequenceId(cell, currentReplaySeqId);
+clock.update(cell.getTimestamp());
 
 restoreEdit(store, cell, memstoreSize);
 editsCount++;

http://git-wip-us.apache.org/repos/asf/hbase/blob/58a51bcb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 7350473..a7e590b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -720,6 +720,114 @@ public class TestHRegion {
 }
   }
 
+  public void verifyClockUpdatesOnRecoveryEditReplay(Clock clock, List 
seqIds,
+  List editTimestamps, long recoverSeqId, long expectedTimestamp) 
throws Exception {
+byte[] family = Bytes.toBytes("family");
+region = initHRegion(tableName, method
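
Editor's note: the clock.update(cell.getTimestamp()) call added above is meaningful because a hybrid timestamp packs physical and logical time into a single long, exactly as the getPhysicalTime/getLogicalTime accessors in this diff suggest. A toy decomposition follows; the 20-bit logical width is assumed here purely for illustration, the real constants live in TimestampType.

public final class HybridTimestampSketch {
  private static final int BITS_FOR_LOGICAL_TIME = 20;                     // assumed width
  private static final long LOGICAL_TIME_MAX_VALUE = (1L << BITS_FOR_LOGICAL_TIME) - 1;

  static long getPhysicalTime(long timestamp) {
    return timestamp >>> BITS_FOR_LOGICAL_TIME;   // same shift as the HLC accessor above
  }

  static long getLogicalTime(long timestamp) {
    return timestamp & LOGICAL_TIME_MAX_VALUE;    // same mask as the HLC accessor above
  }

  public static void main(String[] args) {
    long hybrid = (1502744122000L << BITS_FOR_LOGICAL_TIME) | 7L;  // physical ms + logical 7
    System.out.println(getPhysicalTime(hybrid) + " ms, logical " + getLogicalTime(hybrid));
  }
}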

hbase git commit: HBASE-18303 Clean up @Parameter boilerplate

2017-08-14 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/branch-2 4e9961b4f -> 0ded122b1


HBASE-18303 Clean up @Parameter boilerplate


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0ded122b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0ded122b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0ded122b

Branch: refs/heads/branch-2
Commit: 0ded122b1ebaa006e84f12a7a051eb8eaca571c3
Parents: 4e9961b
Author: Mike Drob 
Authored: Fri Jun 30 12:13:56 2017 -0500
Committer: Mike Drob 
Committed: Mon Aug 14 14:24:37 2017 -0500

--
 .../hadoop/hbase/filter/TestKeyOnlyFilter.java  | 15 -
 .../hadoop/hbase/HBaseCommonTestingUtility.java | 22 
 .../apache/hadoop/hbase/types/TestStruct.java   | 18 +++-
 .../hadoop/hbase/util/TestByteBufferUtils.java  |  8 ++-
 .../hbase/codec/keyvalue/TestKeyValueTool.java  | 16 +++---
 .../codec/prefixtree/row/TestRowEncoder.java|  6 +-
 .../hadoop/hbase/rest/TestMultiRowResource.java |  5 +
 .../hadoop/hbase/HBaseTestingUtility.java   | 20 +-
 .../encoding/TestSeekToBlockWithEncoders.java   |  4 ++--
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java |  3 ++-
 .../apache/hadoop/hbase/io/hfile/TestHFile.java |  3 ++-
 .../hbase/io/hfile/TestHFileBlockIndex.java |  3 ++-
 .../hbase/io/hfile/TestHFileWriterV3.java   |  3 ++-
 .../hbase/util/TestCoprocessorScanPolicy.java   |  3 ++-
 14 files changed, 59 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0ded122b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
index 33e3cd9..f957b59 100644
--- 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
+++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
@@ -26,6 +26,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.ByteBufferKeyValue;
 
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -44,20 +45,12 @@ import org.junit.runners.Parameterized.Parameters;
 @RunWith(Parameterized.class)
 public class TestKeyOnlyFilter {
 
-  private final boolean lenAsVal;
+  @Parameterized.Parameter
+  public boolean lenAsVal;
 
   @Parameters
   public static Collection<Object[]> parameters() {
-List<Object[]> paramList = new ArrayList<>(2);

-{
-  paramList.add(new Object[] { false });
-  paramList.add(new Object[] { true });
-}
-return paramList;
-  }
-
-  public TestKeyOnlyFilter(boolean lenAsVal) {
-this.lenAsVal = lenAsVal;
+return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/0ded122b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index e191046..1790f4a 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 import java.util.UUID;
 
 import org.apache.commons.io.FileUtils;
@@ -28,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.io.compress.Compression;
 
 /**
  * Common helpers for testing HBase that do not depend on specific server/etc. 
things.
@@ -37,6 +40,25 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 public class HBaseCommonTestingUtility {
   protected static final Log LOG = 
LogFactory.getLog(HBaseCommonTestingUtility.class);
 
+  /** Compression algorithms to use in parameterized JUnit 4 tests */
+  public static final List<Object[]> COMPRESSION_ALGORITHMS_PARAMETERIZED =
+Arrays.asList(new Object[][] {
+  { Compression.Algorithm.NONE },
+  { Compression.Algorithm.GZ }
+});
+
+  /** This is for unit tests parameterized with a two booleans. */
+  public static final 
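
Editor's note: the pattern these tests move to, reduced to a self-contained JUnit 4 sketch (class and test names here are illustrative; the shared list mirrors the BOOLEAN_PARAMETERIZED constant introduced above).

import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class BooleanParamExampleTest {
  /** Shared parameter matrix, analogous to HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED. */
  public static final List<Object[]> BOOLEAN_PARAMETERIZED =
      Arrays.asList(new Object[][] { { false }, { true } });

  // Field injection replaces the constructor-plus-final-field boilerplate removed in this patch.
  @Parameterized.Parameter
  public boolean lenAsVal;

  @Parameters
  public static List<Object[]> parameters() {
    return BOOLEAN_PARAMETERIZED;
  }

  @Test
  public void runsOncePerParameterValue() {
    System.out.println("lenAsVal = " + lenAsVal);
  }
}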

[2/3] hbase git commit: HBASE-18497 Add clock type to proto message for HLC region open/close (revision 4)

2017-08-14 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/93c0d71c/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
index f739de6..a74ee18 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
@@ -4542,28 +4542,47 @@ public final class AdminProtos {
 
 /**
  * 
- * physical or hybrid timestamp from master clock
+ * timestamps from each clock on master
  * 
  *
- * optional .hbase.pb.NodeTime nodeTime = 6;
+ * repeated .hbase.pb.NodeTime nodeTimes = 6;
  */
-boolean hasNodeTime();
+
java.util.List
 
+getNodeTimesList();
 /**
  * 
- * physical or hybrid timestamp from master clock
+ * timestamps from each clock on master
  * 
  *
- * optional .hbase.pb.NodeTime nodeTime = 6;
+ * repeated .hbase.pb.NodeTime nodeTimes = 6;
  */
-org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime 
getNodeTime();
+org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime 
getNodeTimes(int index);
 /**
  * 
- * physical or hybrid timestamp from master clock
+ * timestamps from each clock on master
  * 
  *
- * optional .hbase.pb.NodeTime nodeTime = 6;
+ * repeated .hbase.pb.NodeTime nodeTimes = 6;
  */
-
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTimeOrBuilder 
getNodeTimeOrBuilder();
+int getNodeTimesCount();
+/**
+ * 
+ * timestamps from each clock on master
+ * 
+ *
+ * repeated .hbase.pb.NodeTime nodeTimes = 6;
+ */
+java.util.List
 
+getNodeTimesOrBuilderList();
+/**
+ * 
+ * timestamps from each clock on master
+ * 
+ *
+ * repeated .hbase.pb.NodeTime nodeTimes = 6;
+ */
+
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTimeOrBuilder 
getNodeTimesOrBuilder(
+int index);
   }
   /**
* Protobuf type {@code hbase.pb.OpenRegionRequest}
@@ -4580,6 +4599,7 @@ public final class AdminProtos {
   openInfo_ = java.util.Collections.emptyList();
   serverStartCode_ = 0L;
   masterSystemTime_ = 0L;
+  nodeTimes_ = java.util.Collections.emptyList();
 }
 
 @java.lang.Override
@@ -4630,16 +4650,12 @@ public final class AdminProtos {
   break;
 }
 case 50: {
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.Builder 
subBuilder = null;
-  if (((bitField0_ & 0x0004) == 0x0004)) {
-subBuilder = nodeTime_.toBuilder();
-  }
-  nodeTime_ = 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.PARSER,
 extensionRegistry);
-  if (subBuilder != null) {
-subBuilder.mergeFrom(nodeTime_);
-nodeTime_ = subBuilder.buildPartial();
+  if (!((mutable_bitField0_ & 0x0008) == 0x0008)) {
+nodeTimes_ = new 
java.util.ArrayList();
+mutable_bitField0_ |= 0x0008;
   }
-  bitField0_ |= 0x0004;
+  nodeTimes_.add(
+  
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.PARSER,
 extensionRegistry));
   break;
 }
   }
@@ -4653,6 +4669,9 @@ public final class AdminProtos {
 if (((mutable_bitField0_ & 0x0001) == 0x0001)) {
   openInfo_ = java.util.Collections.unmodifiableList(openInfo_);
 }
+if (((mutable_bitField0_ & 0x0008) == 0x0008)) {
+  nodeTimes_ = java.util.Collections.unmodifiableList(nodeTimes_);
+}
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
   }
@@ -5928,37 +5947,59 @@ public final class AdminProtos {
   return masterSystemTime_;
 }
 
-public static final int NODETIME_FIELD_NUMBER = 6;
-private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime 
nodeTime_;
+public static final int NODETIMES_FIELD_NUMBER = 6;
+private 
java.util.List
 nodeTimes_;
 /**
  * 
- * physical or hybrid timestamp from master clock
+ * timestamps from each clock on master
  * 
  *
- * optional .hbase.pb.NodeTime nodeTime = 6;
+ * repeated .hbase.pb.NodeTime nodeTimes = 6;
  */
-public boolean hasNodeTime() {
-  return ((bitField0_ & 0x0004) == 0x0004);
+public 
java.ut

[1/3] hbase git commit: HBASE-18497 Add clock type to proto message for HLC region open/close (revision 4)

2017-08-14 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14070.HLC d9a990490 -> 93c0d71c4


http://git-wip-us.apache.org/repos/asf/hbase/blob/93c0d71c/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
index cb1a47c..8fdd6b6 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
@@ -19002,19 +19002,27 @@ public final class HBaseProtos {
   org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
 
 /**
- * optional uint64 time = 1;
+ * optional .hbase.pb.NodeTime.ClockType clockType = 1;
  */
-boolean hasTime();
+boolean hasClockType();
 /**
- * optional uint64 time = 1;
+ * optional .hbase.pb.NodeTime.ClockType clockType = 1;
  */
-long getTime();
+
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.ClockType
 getClockType();
+
+/**
+ * optional uint64 timestamp = 2;
+ */
+boolean hasTimestamp();
+/**
+ * optional uint64 timestamp = 2;
+ */
+long getTimestamp();
   }
   /**
* 
**
-   * Used to send timestamp of node. The timestamp can be interpreted as 
either a physical or hybrid
-   * timestamp using TimestampType.
+   * Used to send timestamps from each clock of a master or region server
* 
*
* Protobuf type {@code hbase.pb.NodeTime}
@@ -19028,7 +19036,8 @@ public final class HBaseProtos {
   super(builder);
 }
 private NodeTime() {
-  time_ = 0L;
+  clockType_ = 0;
+  timestamp_ = 0L;
 }
 
 @java.lang.Override
@@ -19060,8 +19069,19 @@ public final class HBaseProtos {
   break;
 }
 case 8: {
-  bitField0_ |= 0x0001;
-  time_ = input.readUInt64();
+  int rawValue = input.readEnum();
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.ClockType
 value = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.ClockType.valueOf(rawValue);
+  if (value == null) {
+unknownFields.mergeVarintField(1, rawValue);
+  } else {
+bitField0_ |= 0x0001;
+clockType_ = rawValue;
+  }
+  break;
+}
+case 16: {
+  bitField0_ |= 0x0002;
+  timestamp_ = input.readUInt64();
   break;
 }
   }
@@ -19088,20 +19108,135 @@ public final class HBaseProtos {
   
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.Builder.class);
 }
 
+/**
+ * Protobuf enum {@code hbase.pb.NodeTime.ClockType}
+ */
+public enum ClockType
+implements 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
+  /**
+   * SYSTEM = 0;
+   */
+  SYSTEM(0),
+  /**
+   * SYSTEM_MONOTONIC = 1;
+   */
+  SYSTEM_MONOTONIC(1),
+  /**
+   * HLC = 2;
+   */
+  HLC(2),
+  ;
+
+  /**
+   * SYSTEM = 0;
+   */
+  public static final int SYSTEM_VALUE = 0;
+  /**
+   * SYSTEM_MONOTONIC = 1;
+   */
+  public static final int SYSTEM_MONOTONIC_VALUE = 1;
+  /**
+   * HLC = 2;
+   */
+  public static final int HLC_VALUE = 2;
+
+
+  public final int getNumber() {
+return value;
+  }
+
+  /**
+   * @deprecated Use {@link #forNumber(int)} instead.
+   */
+  @java.lang.Deprecated
+  public static ClockType valueOf(int value) {
+return forNumber(value);
+  }
+
+  public static ClockType forNumber(int value) {
+switch (value) {
+  case 0: return SYSTEM;
+  case 1: return SYSTEM_MONOTONIC;
+  case 2: return HLC;
+  default: return null;
+}
+  }
+
+  public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap
+  internalGetValueMap() {
+return internalValueMap;
+  }
+  private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
+  ClockType> internalValueMap =
+new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap()
 {
+  public ClockType findValueByNumber(int number) {
+return ClockType.forNumber(number);
+  }
+};
+
+  public final 
org.apache.hadoop.hbase.shaded.com.
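
Editor's note: in builder terms (assuming the standard setters protobuf generates alongside the getters shown above; setClockType/setTimestamp are not themselves visible in this excerpt), a NodeTime entry for the new repeated nodeTimes field would be built roughly like this.

import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NodeTime.ClockType;

public class NodeTimeSketch {
  // One NodeTime per clock; the request now carries a repeated nodeTimes field (see the
  // AdminProtos hunk elsewhere in this thread) instead of a single optional nodeTime.
  static NodeTime hlc(long timestamp) {
    return NodeTime.newBuilder()
        .setClockType(ClockType.HLC)   // SYSTEM, SYSTEM_MONOTONIC or HLC per the new enum
        .setTimestamp(timestamp)       // replaces the old single 'time' field
        .build();
  }
}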

[3/3] hbase git commit: HBASE-18497 Add clock type to proto message for HLC region open/close (revision 4)

2017-08-14 Thread appy
HBASE-18497 Add clock type to proto message for HLC region open/close (revision 
4)

- Removed overridden methods in HMaster#get/set/updateClock
- Changed parameter name and exception text in ProtobufUtil#toClockType
- Removed unused import of ClockType in TestMasterNoCluster
- Removed unused ServerManager#sendRegionClose(ServerName, HRegionInfo, Long) 
that recursively called itself
- Added TODO in RSProcedureDispatcher to update clock with top timestamp of 
both loops
- Added TODO in RequestConverter for removing/merging masterSystemTime

Change-Id: I916e759512d958eaa18c8de8eb0f20dc96adec12


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/93c0d71c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/93c0d71c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/93c0d71c

Branch: refs/heads/HBASE-14070.HLC
Commit: 93c0d71c47ba850b4353872082ddc1ffa76dd30d
Parents: d9a9904
Author: Amit Patel 
Authored: Wed Aug 2 20:06:09 2017 -0700
Committer: Apekshit Sharma 
Committed: Mon Aug 14 12:24:09 2017 -0700

--
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |5 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |   69 +-
 .../hbase/shaded/protobuf/RequestConverter.java |   29 +-
 .../shaded/protobuf/generated/AdminProtos.java  | 2151 --
 .../shaded/protobuf/generated/HBaseProtos.java  |  303 ++-
 .../src/main/protobuf/Admin.proto   |   16 +-
 .../src/main/protobuf/HBase.proto   |   11 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   19 -
 .../hadoop/hbase/master/MasterServices.java |6 -
 .../hadoop/hbase/master/ServerManager.java  |   17 +-
 .../master/procedure/RSProcedureDispatcher.java |   54 +-
 .../hbase/regionserver/HRegionServer.java   |8 -
 .../hbase/regionserver/RSRpcServices.java   |   36 +-
 .../regionserver/RegionServerServices.java  |6 -
 .../hadoop/hbase/MockRegionServerServices.java  |8 -
 .../hbase/master/MockNoopMasterServices.java|3 -
 .../hadoop/hbase/master/MockRegionServer.java   |8 -
 .../hbase/master/TestMasterNoCluster.java   |3 +-
 18 files changed, 1925 insertions(+), 827 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/93c0d71c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 0cef556..1ae0eec 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -752,12 +752,13 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin {
   }
 
   private CompletableFuture closeRegion(HRegionInfo hri, ServerName 
serverName) {
+CloseRegionRequest request = 
ProtobufUtil.buildCloseRegionRequest(serverName,
+hri.getRegionName());
 return this
 . newAdminCaller()
 .action(
   (controller, stub) -> this. adminCall(
-controller, stub,
-ProtobufUtil.buildCloseRegionRequest(serverName, 
hri.getRegionName()),
+controller, stub, request,
 (s, c, req, done) -> s.closeRegion(controller, req, done), resp -> 
resp.getClosed()))
 .serverName(serverName).call();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/93c0d71c/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 4ebba53..407d59f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ClockType;
 import org.apache.hadoop.hbase.ClusterId;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -154,6 +155,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesP
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated

hbase git commit: HBASE-18303 Clean up @Parameter boilerplate

2017-08-14 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/master ea8fa59a4 -> 0b26ccdaa


HBASE-18303 Clean up @Parameter boilerplate


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b26ccda
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b26ccda
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b26ccda

Branch: refs/heads/master
Commit: 0b26ccdaa1b8700e7958aeebbaf9cad81e737dd0
Parents: ea8fa59
Author: Mike Drob 
Authored: Fri Jun 30 12:13:56 2017 -0500
Committer: Mike Drob 
Committed: Mon Aug 14 14:23:24 2017 -0500

--
 .../hadoop/hbase/filter/TestKeyOnlyFilter.java  | 15 -
 .../hadoop/hbase/HBaseCommonTestingUtility.java | 22 
 .../apache/hadoop/hbase/types/TestStruct.java   | 18 +++-
 .../hadoop/hbase/util/TestByteBufferUtils.java  |  8 ++-
 .../hbase/codec/keyvalue/TestKeyValueTool.java  | 16 +++---
 .../codec/prefixtree/row/TestRowEncoder.java|  6 +-
 .../hadoop/hbase/rest/TestMultiRowResource.java |  5 +
 .../hadoop/hbase/HBaseTestingUtility.java   | 20 +-
 .../encoding/TestSeekToBlockWithEncoders.java   |  4 ++--
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java |  3 ++-
 .../apache/hadoop/hbase/io/hfile/TestHFile.java |  3 ++-
 .../hbase/io/hfile/TestHFileBlockIndex.java |  3 ++-
 .../hbase/io/hfile/TestHFileWriterV3.java   |  3 ++-
 .../hbase/util/TestCoprocessorScanPolicy.java   |  3 ++-
 14 files changed, 59 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b26ccda/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
index 33e3cd9..f957b59 100644
--- 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
+++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
@@ -26,6 +26,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.ByteBufferKeyValue;
 
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -44,20 +45,12 @@ import org.junit.runners.Parameterized.Parameters;
 @RunWith(Parameterized.class)
 public class TestKeyOnlyFilter {
 
-  private final boolean lenAsVal;
+  @Parameterized.Parameter
+  public boolean lenAsVal;
 
   @Parameters
   public static Collection<Object[]> parameters() {
-List<Object[]> paramList = new ArrayList<>(2);
-{
-  paramList.add(new Object[] { false });
-  paramList.add(new Object[] { true });
-}
-return paramList;
-  }
-
-  public TestKeyOnlyFilter(boolean lenAsVal) {
-this.lenAsVal = lenAsVal;
+return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b26ccda/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index e191046..1790f4a 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 import java.util.UUID;
 
 import org.apache.commons.io.FileUtils;
@@ -28,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.io.compress.Compression;
 
 /**
  * Common helpers for testing HBase that do not depend on specific server/etc. 
things.
@@ -37,6 +40,25 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 public class HBaseCommonTestingUtility {
   protected static final Log LOG = 
LogFactory.getLog(HBaseCommonTestingUtility.class);
 
+  /** Compression algorithms to use in parameterized JUnit 4 tests */
+  public static final List<Object[]> COMPRESSION_ALGORITHMS_PARAMETERIZED =
+Arrays.asList(new Object[][] {
+  { Compression.Algorithm.NONE },
+  { Compression.Algorithm.GZ }
+});
+
+  /** This is for unit tests parameterized with a two booleans. */
+  public static final List

hbase git commit: HBASE-18238 rubocop autocorrect for bin/

2017-08-14 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/branch-2 cf050de91 -> 4e9961b4f


HBASE-18238 rubocop autocorrect for bin/


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4e9961b4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4e9961b4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4e9961b4

Branch: refs/heads/branch-2
Commit: 4e9961b4fcb198a74d292e6a5405755dce261dcc
Parents: cf050de
Author: Mike Drob 
Authored: Wed Jul 19 12:05:26 2017 -0500
Committer: Mike Drob 
Committed: Mon Aug 14 14:04:43 2017 -0500

--
 bin/draining_servers.rb | 108 +++
 bin/get-active-master.rb|   6 +-
 bin/hirb.rb |  46 +++--
 bin/region_mover.rb |   2 +-
 bin/region_status.rb|  50 +++---
 bin/replication/copy_tables_desc.rb |  47 +++---
 bin/shutdown_regionserver.rb|  16 ++---
 7 files changed, 132 insertions(+), 143 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4e9961b4/bin/draining_servers.rb
--
diff --git a/bin/draining_servers.rb b/bin/draining_servers.rb
index 8e1b250..ea74c30 100644
--- a/bin/draining_servers.rb
+++ b/bin/draining_servers.rb
@@ -16,7 +16,7 @@
 # limitations under the License.
 #
 
-# Add or remove servers from draining mode via zookeeper 
+# Add or remove servers from draining mode via zookeeper
 
 require 'optparse'
 include Java
@@ -29,13 +29,13 @@ java_import org.apache.commons.logging.Log
 java_import org.apache.commons.logging.LogFactory
 
 # Name of this script
-NAME = "draining_servers"
+NAME = 'draining_servers'.freeze
 
 # Do command-line parsing
 options = {}
 optparse = OptionParser.new do |opts|
   opts.banner = "Usage: ./hbase org.jruby.Main #{NAME}.rb [options] 
add|remove|list || ..."
-  opts.separator 'Add remove or list servers in draining mode. Can accept 
either hostname to drain all region servers' +
+  opts.separator 'Add remove or list servers in draining mode. Can accept 
either hostname to drain all region servers' \
  'in that host, a host:port pair or a host,port,startCode 
triplet. More than one server can be given separated by space'
   opts.on('-h', '--help', 'Display usage information') do
 puts opts
@@ -51,117 +51,117 @@ optparse.parse!
 # Return array of servernames where servername is hostname+port+startcode
 # comma-delimited
 def getServers(admin)
-  serverInfos = admin.getClusterStatus().getServers()
+  serverInfos = admin.getClusterStatus.getServers
   servers = []
   for server in serverInfos
-servers << server.getServerName()
+servers << server.getServerName
   end
-  return servers
+  servers
 end
 
 def getServerNames(hostOrServers, config)
   ret = []
   connection = ConnectionFactory.createConnection(config)
-  
+
   for hostOrServer in hostOrServers
 # check whether it is already serverName. No need to connect to cluster
 parts = hostOrServer.split(',')
-if parts.size() == 3
+if parts.size == 3
   ret << hostOrServer
-else 
-  admin = connection.getAdmin() if not admin
+else
+  admin = connection.getAdmin unless admin
   servers = getServers(admin)
 
-  hostOrServer = hostOrServer.gsub(/:/, ",")
-  for server in servers 
+  hostOrServer = hostOrServer.tr(':', ',')
+  for server in servers
 ret << server if server.start_with?(hostOrServer)
   end
 end
   end
-  
-  admin.close() if admin
-  connection.close()
-  return ret
+
+  admin.close if admin
+  connection.close
+  ret
 end
 
-def addServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
+def addServers(_options, hostOrServers)
+  config = HBaseConfiguration.create
   servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
"draining_servers", nil)
+
+  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
'draining_servers', nil)
   parentZnode = zkw.znodePaths.drainingZNode
-  
+
   begin
 for server in servers
   node = ZKUtil.joinZNode(parentZnode, server)
   ZKUtil.createAndFailSilent(zkw, node)
 end
   ensure
-zkw.close()
+zkw.close
   end
 end
 
-def removeServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
+def removeServers(_options, hostOrServers)
+  config = HBaseConfiguration.create
   servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
"draining_servers", nil)
+
+  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
'draining_servers', nil)
   parentZnode = zkw.znodePaths.drainingZNode
-  
+
   begin
 for server i

hbase git commit: HBASE-18238 rubocop autocorrect for bin/

2017-08-14 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/master 096dac2e8 -> ea8fa59a4


HBASE-18238 rubocop autocorrect for bin/


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ea8fa59a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ea8fa59a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ea8fa59a

Branch: refs/heads/master
Commit: ea8fa59a4c2fe7633ebe70df622098bfe36b5df9
Parents: 096dac2
Author: Mike Drob 
Authored: Wed Jul 19 12:05:26 2017 -0500
Committer: Mike Drob 
Committed: Mon Aug 14 13:53:37 2017 -0500

--
 bin/draining_servers.rb | 108 +++
 bin/get-active-master.rb|   6 +-
 bin/hirb.rb |  46 +++--
 bin/region_mover.rb |   2 +-
 bin/region_status.rb|  50 +++---
 bin/replication/copy_tables_desc.rb |  47 +++---
 bin/shutdown_regionserver.rb|  16 ++---
 7 files changed, 132 insertions(+), 143 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ea8fa59a/bin/draining_servers.rb
--
diff --git a/bin/draining_servers.rb b/bin/draining_servers.rb
index 8e1b250..ea74c30 100644
--- a/bin/draining_servers.rb
+++ b/bin/draining_servers.rb
@@ -16,7 +16,7 @@
 # limitations under the License.
 #
 
-# Add or remove servers from draining mode via zookeeper 
+# Add or remove servers from draining mode via zookeeper
 
 require 'optparse'
 include Java
@@ -29,13 +29,13 @@ java_import org.apache.commons.logging.Log
 java_import org.apache.commons.logging.LogFactory
 
 # Name of this script
-NAME = "draining_servers"
+NAME = 'draining_servers'.freeze
 
 # Do command-line parsing
 options = {}
 optparse = OptionParser.new do |opts|
   opts.banner = "Usage: ./hbase org.jruby.Main #{NAME}.rb [options] 
add|remove|list || ..."
-  opts.separator 'Add remove or list servers in draining mode. Can accept 
either hostname to drain all region servers' +
+  opts.separator 'Add remove or list servers in draining mode. Can accept 
either hostname to drain all region servers' \
  'in that host, a host:port pair or a host,port,startCode 
triplet. More than one server can be given separated by space'
   opts.on('-h', '--help', 'Display usage information') do
 puts opts
@@ -51,117 +51,117 @@ optparse.parse!
 # Return array of servernames where servername is hostname+port+startcode
 # comma-delimited
 def getServers(admin)
-  serverInfos = admin.getClusterStatus().getServers()
+  serverInfos = admin.getClusterStatus.getServers
   servers = []
   for server in serverInfos
-servers << server.getServerName()
+servers << server.getServerName
   end
-  return servers
+  servers
 end
 
 def getServerNames(hostOrServers, config)
   ret = []
   connection = ConnectionFactory.createConnection(config)
-  
+
   for hostOrServer in hostOrServers
 # check whether it is already serverName. No need to connect to cluster
 parts = hostOrServer.split(',')
-if parts.size() == 3
+if parts.size == 3
   ret << hostOrServer
-else 
-  admin = connection.getAdmin() if not admin
+else
+  admin = connection.getAdmin unless admin
   servers = getServers(admin)
 
-  hostOrServer = hostOrServer.gsub(/:/, ",")
-  for server in servers 
+  hostOrServer = hostOrServer.tr(':', ',')
+  for server in servers
 ret << server if server.start_with?(hostOrServer)
   end
 end
   end
-  
-  admin.close() if admin
-  connection.close()
-  return ret
+
+  admin.close if admin
+  connection.close
+  ret
 end
 
-def addServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
+def addServers(_options, hostOrServers)
+  config = HBaseConfiguration.create
   servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
"draining_servers", nil)
+
+  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
'draining_servers', nil)
   parentZnode = zkw.znodePaths.drainingZNode
-  
+
   begin
 for server in servers
   node = ZKUtil.joinZNode(parentZnode, server)
   ZKUtil.createAndFailSilent(zkw, node)
 end
   ensure
-zkw.close()
+zkw.close
   end
 end
 
-def removeServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
+def removeServers(_options, hostOrServers)
+  config = HBaseConfiguration.create
   servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
"draining_servers", nil)
+
+  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
'draining_servers', nil)
   parentZnode = zkw.znodePaths.drainingZNode
-  
+
   begin
 for server in se

hbase git commit: HBASE-18522 Add RowMutations support to Batch

2017-08-14 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/branch-2 add997451 -> cf050de91


HBASE-18522 Add RowMutations support to Batch


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cf050de9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cf050de9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cf050de9

Branch: refs/heads/branch-2
Commit: cf050de9172a36b767eb7e4700787b9d6a32a1b9
Parents: add9974
Author: Jerry He 
Authored: Mon Aug 14 09:28:49 2017 -0700
Committer: Jerry He 
Committed: Mon Aug 14 10:43:10 2017 -0700

--
 .../hbase/client/MultiServerCallable.java   | 64 +++-
 .../org/apache/hadoop/hbase/client/Table.java   |  4 +-
 .../hbase/shaded/protobuf/RequestConverter.java |  6 +-
 .../shaded/protobuf/ResponseConverter.java  | 37 ++-
 .../hbase/client/TestFromClientSide3.java   | 46 ++
 .../hadoop/hbase/client/TestMultiParallel.java  | 34 ++-
 6 files changed, 168 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cf050de9/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index 33c9a0b..7f6052e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -93,30 +94,64 @@ class MultiServerCallable extends 
CancellableRegionServerCallable
 RegionAction.Builder regionActionBuilder = RegionAction.newBuilder();
 ClientProtos.Action.Builder actionBuilder = 
ClientProtos.Action.newBuilder();
 MutationProto.Builder mutationBuilder = MutationProto.newBuilder();
-List cells = null;
-// The multi object is a list of Actions by region.  Iterate by region.
+
+// Pre-size. Presume at least a KV per Action. There are likely more.
+List cells =
+(this.cellBlock ? new ArrayList(countOfActions) : null);
+
 long nonceGroup = multiAction.getNonceGroup();
 if (nonceGroup != HConstants.NO_NONCE) {
   multiRequestBuilder.setNonceGroup(nonceGroup);
 }
+// Index to track RegionAction within the MultiRequest
+int regionActionIndex = -1;
+// Map from a created RegionAction to the original index for a 
RowMutations within
+// its original list of actions
+Map rowMutationsIndexMap = new HashMap<>();
+// The multi object is a list of Actions by region. Iterate by region.
 for (Map.Entry> e: 
this.multiAction.actions.entrySet()) {
   final byte [] regionName = e.getKey();
   final List actions = e.getValue();
   regionActionBuilder.clear();
   regionActionBuilder.setRegion(RequestConverter.buildRegionSpecifier(
   HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME, 
regionName));
-  if (this.cellBlock) {
-// Pre-size. Presume at least a KV per Action.  There are likely more.
-if (cells == null) cells = new ArrayList<>(countOfActions);
-// Send data in cellblocks. The call to buildNoDataMultiRequest will 
skip RowMutations.
-// They have already been handled above. Guess at count of cells
-regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, actions, cells,
-  regionActionBuilder, actionBuilder, mutationBuilder);
-  } else {
-regionActionBuilder = RequestConverter.buildRegionAction(regionName, 
actions,
-  regionActionBuilder, actionBuilder, mutationBuilder);
+
+  int rowMutations = 0;
+  for (Action action : actions) {
+Row row = action.getAction();
+// Row Mutations are a set of Puts and/or Deletes all to be applied 
atomically
+// on the one row. We do separate RegionAction for each RowMutations.
+// We maintain a map to keep track of this RegionAction and the 
original Action index.
+if (row instanceof RowMutations) {
+  RowMutations rms = (RowMutations)row;
+  if (this.cellBlock) {
+// Build a multi request absent its Cell payload. Send data in 
cellblocks.
+regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, rms, cells,
+  regionActionBuilder, actionBuilder, mutationBuilder);
+  } else {
+regionActionBuilder = 
RequestConverter.buildRegionAction(regionName, rms);
+  }
+
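
The truncated diff above wires each RowMutations into its own atomic RegionAction and keeps a map from that RegionAction back to the original action index. As a client-side sketch of what the change enables (not part of the commit itself), the example below submits a RowMutations through Table.batch() alongside an ordinary Put. The table name, column family, and values are invented for the example, and it assumes an HBase release that includes HBASE-18522; previously an atomic multi-mutation on one row went through Table.mutateRow() rather than batch().

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchWithRowMutations {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("demo_table"))) {

      // All mutations inside a RowMutations target the same row and apply atomically.
      byte[] row1 = Bytes.toBytes("row1");
      RowMutations rm = new RowMutations(row1);
      rm.add(new Put(row1).addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"), Bytes.toBytes("1")));
      rm.add(new Delete(row1).addColumns(Bytes.toBytes("cf"), Bytes.toBytes("b")));

      // With HBASE-18522 the RowMutations rides in the same batch as ordinary actions.
      List<Row> actions = new ArrayList<>();
      actions.add(rm);
      actions.add(new Put(Bytes.toBytes("row2"))
          .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"), Bytes.toBytes("2")));

      Object[] results = new Object[actions.size()];
      table.batch(actions, results);  // results[i] corresponds to actions.get(i)
    }
  }
}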

hbase git commit: HBASE-18522 Add RowMutations support to Batch

2017-08-14 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/master bd4007309 -> 096dac2e8


HBASE-18522 Add RowMutations support to Batch


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/096dac2e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/096dac2e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/096dac2e

Branch: refs/heads/master
Commit: 096dac2e83c675f212bad4f91888d8440ba152ca
Parents: bd40073
Author: Jerry He 
Authored: Mon Aug 14 10:39:46 2017 -0700
Committer: Jerry He 
Committed: Mon Aug 14 10:39:46 2017 -0700

--
 .../hbase/client/MultiServerCallable.java   | 64 +++-
 .../org/apache/hadoop/hbase/client/Table.java   |  4 +-
 .../hbase/shaded/protobuf/RequestConverter.java |  6 +-
 .../shaded/protobuf/ResponseConverter.java  | 37 ++-
 .../hbase/client/TestFromClientSide3.java   | 46 ++
 .../hadoop/hbase/client/TestMultiParallel.java  | 34 ++-
 6 files changed, 168 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/096dac2e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index 33c9a0b..7f6052e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -93,30 +94,64 @@ class MultiServerCallable extends 
CancellableRegionServerCallable
 RegionAction.Builder regionActionBuilder = RegionAction.newBuilder();
 ClientProtos.Action.Builder actionBuilder = 
ClientProtos.Action.newBuilder();
 MutationProto.Builder mutationBuilder = MutationProto.newBuilder();
-List cells = null;
-// The multi object is a list of Actions by region.  Iterate by region.
+
+// Pre-size. Presume at least a KV per Action. There are likely more.
+List cells =
+(this.cellBlock ? new ArrayList(countOfActions) : null);
+
 long nonceGroup = multiAction.getNonceGroup();
 if (nonceGroup != HConstants.NO_NONCE) {
   multiRequestBuilder.setNonceGroup(nonceGroup);
 }
+// Index to track RegionAction within the MultiRequest
+int regionActionIndex = -1;
+// Map from a created RegionAction to the original index for a 
RowMutations within
+// its original list of actions
+Map rowMutationsIndexMap = new HashMap<>();
+// The multi object is a list of Actions by region. Iterate by region.
 for (Map.Entry> e: 
this.multiAction.actions.entrySet()) {
   final byte [] regionName = e.getKey();
   final List actions = e.getValue();
   regionActionBuilder.clear();
   regionActionBuilder.setRegion(RequestConverter.buildRegionSpecifier(
   HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME, 
regionName));
-  if (this.cellBlock) {
-// Pre-size. Presume at least a KV per Action.  There are likely more.
-if (cells == null) cells = new ArrayList<>(countOfActions);
-// Send data in cellblocks. The call to buildNoDataMultiRequest will 
skip RowMutations.
-// They have already been handled above. Guess at count of cells
-regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, actions, cells,
-  regionActionBuilder, actionBuilder, mutationBuilder);
-  } else {
-regionActionBuilder = RequestConverter.buildRegionAction(regionName, 
actions,
-  regionActionBuilder, actionBuilder, mutationBuilder);
+
+  int rowMutations = 0;
+  for (Action action : actions) {
+Row row = action.getAction();
+// Row Mutations are a set of Puts and/or Deletes all to be applied 
atomically
+// on the one row. We do separate RegionAction for each RowMutations.
+// We maintain a map to keep track of this RegionAction and the 
original Action index.
+if (row instanceof RowMutations) {
+  RowMutations rms = (RowMutations)row;
+  if (this.cellBlock) {
+// Build a multi request absent its Cell payload. Send data in 
cellblocks.
+regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, rms, cells,
+  regionActionBuilder, actionBuilder, mutationBuilder);
+  } else {
+regionActionBuilder = 
RequestConverter.buildRegionAction(regionName, rms);
+  }
+

hbase git commit: HBASE-18595 Set version in branch-2 from 2.0.0-alpha2-SNAPSHOT to 2.0.0-alpha2

2017-08-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 c20ce21fe -> add997451


HBASE-18595 Set version in branch-2 from 2.0.0-alpha2-SNAPSHOT to 2.0.0-alpha2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/add99745
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/add99745
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/add99745

Branch: refs/heads/branch-2
Commit: add99745155dec52a11e2fbd88497fcda88a39b9
Parents: c20ce21
Author: Michael Stack 
Authored: Mon Aug 14 10:28:44 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 10:28:44 2017 -0700

--
 hbase-annotations/pom.xml   |  2 +-
 .../hbase-archetype-builder/pom.xml |  2 +-
 hbase-archetypes/hbase-client-project/pom.xml   |  2 +-
 .../hbase-shaded-client-project/pom.xml |  2 +-
 hbase-archetypes/pom.xml|  2 +-
 hbase-assembly/pom.xml  |  2 +-
 hbase-checkstyle/pom.xml|  4 +--
 hbase-client/pom.xml|  6 +---
 hbase-common/pom.xml|  2 +-
 hbase-endpoint/pom.xml  |  2 +-
 hbase-examples/pom.xml  |  2 +-
 hbase-external-blockcache/pom.xml   |  2 +-
 hbase-hadoop-compat/pom.xml |  2 +-
 hbase-hadoop2-compat/pom.xml|  2 +-
 hbase-it/pom.xml|  2 +-
 hbase-metrics-api/pom.xml   |  2 +-
 hbase-metrics/pom.xml   |  2 +-
 hbase-prefix-tree/pom.xml   |  2 +-
 hbase-procedure/pom.xml |  2 +-
 hbase-protocol-shaded/pom.xml   |  2 +-
 hbase-protocol/pom.xml  |  2 +-
 hbase-resource-bundle/pom.xml   |  2 +-
 hbase-rest/pom.xml  |  6 +---
 .../apache/hadoop/hbase/rest/RESTServer.java|  4 +--
 .../hbase/rest/HBaseRESTTestingUtility.java |  4 +--
 hbase-rsgroup/pom.xml   |  6 +---
 hbase-server/pom.xml|  6 +---
 hbase-shaded/hbase-shaded-client/pom.xml|  2 +-
 hbase-shaded/hbase-shaded-server/pom.xml|  2 +-
 hbase-shaded/pom.xml|  2 +-
 hbase-shell/pom.xml |  2 +-
 hbase-spark-it/pom.xml  |  2 +-
 hbase-spark/pom.xml |  2 +-
 hbase-testing-util/pom.xml  |  2 +-
 hbase-thrift/pom.xml|  2 +-
 pom.xml | 31 +---
 36 files changed, 37 insertions(+), 86 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/add99745/hbase-annotations/pom.xml
--
diff --git a/hbase-annotations/pom.xml b/hbase-annotations/pom.xml
index 8390dc4..03f5335 100644
--- a/hbase-annotations/pom.xml
+++ b/hbase-annotations/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-2.0.0-alpha-2-SNAPSHOT
+2.0.0-alpha2
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/add99745/hbase-archetypes/hbase-archetype-builder/pom.xml
--
diff --git a/hbase-archetypes/hbase-archetype-builder/pom.xml 
b/hbase-archetypes/hbase-archetype-builder/pom.xml
index 5e9e28c..be85b18 100644
--- a/hbase-archetypes/hbase-archetype-builder/pom.xml
+++ b/hbase-archetypes/hbase-archetype-builder/pom.xml
@@ -25,7 +25,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.0-alpha-2-SNAPSHOT
+2.0.0-alpha2
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/add99745/hbase-archetypes/hbase-client-project/pom.xml
--
diff --git a/hbase-archetypes/hbase-client-project/pom.xml 
b/hbase-archetypes/hbase-client-project/pom.xml
index 38c8ecf..2eaa93e 100644
--- a/hbase-archetypes/hbase-client-project/pom.xml
+++ b/hbase-archetypes/hbase-client-project/pom.xml
@@ -26,7 +26,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.0-alpha-2-SNAPSHOT
+2.0.0-alpha2
 ..
   
   hbase-client-project

http://git-wip-us.apache.org/repos/asf/hbase/blob/add99745/hbase-archetypes/hbase-shaded-client-project/pom.xml
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml 
b/hbase-archetypes/hbase-shaded-client-project/pom.xml
index d97b805..f7a6016 100644
--- a/hbase-archetypes/hbase-shaded-client-project/pom.xml
+++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml
@@ -26,7 +26,7 @@
   
 hbase-archetypes
 org.apach

hbase git commit: HBASE-18522 Add RowMutations support to Batch

2017-08-14 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/branch-1 7bd2795ee -> 9078a034c


HBASE-18522 Add RowMutations support to Batch


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9078a034
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9078a034
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9078a034

Branch: refs/heads/branch-1
Commit: 9078a034c410d53800e656d6a19f810c30fc102f
Parents: 7bd2795
Author: Jerry He 
Authored: Sun Aug 13 18:23:49 2017 -0700
Committer: Jerry He 
Committed: Mon Aug 14 09:21:53 2017 -0700

--
 .../hbase/client/MultiServerCallable.java   | 66 +++-
 .../org/apache/hadoop/hbase/client/Table.java   |  6 +-
 .../hadoop/hbase/protobuf/RequestConverter.java |  6 +-
 .../hbase/protobuf/ResponseConverter.java   | 35 ++-
 .../hbase/client/TestFromClientSide3.java   | 46 ++
 .../hadoop/hbase/client/TestMultiParallel.java  | 49 ---
 6 files changed, 178 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9078a034/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index 42c63eb..b2ea941 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -97,12 +98,21 @@ class MultiServerCallable extends 
PayloadCarryingServerCallable cells = null;
-// The multi object is a list of Actions by region.  Iterate by region.
+
+// Pre-size. Presume at least a KV per Action. There are likely more.
+List cells =
+(this.cellBlock ? new ArrayList(countOfActions) : null);
+
 long nonceGroup = multiAction.getNonceGroup();
 if (nonceGroup != HConstants.NO_NONCE) {
   multiRequestBuilder.setNonceGroup(nonceGroup);
 }
+// Index to track RegionAction within the MultiRequest
+int regionActionIndex = -1;
+// Map from a created RegionAction for a RowMutations to the original 
index within
+// its original list of actions
+Map rowMutationsIndexMap = new HashMap<>();
+// The multi object is a list of Actions by region.  Iterate by region.
 for (Map.Entry>> e: 
this.multiAction.actions.entrySet()) {
   final byte [] regionName = e.getKey();
   final List> actions = e.getValue();
@@ -110,19 +120,46 @@ class MultiServerCallable extends 
PayloadCarryingServerCallable action : actions) {
+Row row = action.getAction();
+// Row Mutations are a set of Puts and/or Deletes all to be applied 
atomically
+// on the one row. We do separate RegionAction for each RowMutations.
+// We maintain a map to keep track of this RegionAction and the 
original Action index.
+if (row instanceof RowMutations) {
+  RowMutations rms = (RowMutations)row;
+  if (this.cellBlock) {
+// Build a multi request absent its Cell payload. Send data in 
cellblocks.
+regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, rms, cells,
+  regionActionBuilder, actionBuilder, mutationBuilder);
+  } else {
+regionActionBuilder = 
RequestConverter.buildRegionAction(regionName, rms);
+  }
+  regionActionBuilder.setAtomic(true);
+  multiRequestBuilder.addRegionAction(regionActionBuilder.build());
+  regionActionIndex++;
+  rowMutationsIndexMap.put(regionActionIndex, 
action.getOriginalIndex());
+  rowMutations++;
+
+  regionActionBuilder.clear();
+  regionActionBuilder.setRegion(RequestConverter.buildRegionSpecifier(
+HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME, 
regionName) );
+}
+  }
 
-  if (this.cellBlock) {
-// Presize.  Presume at least a KV per Action.  There are likely more.
-if (cells == null) cells = new 
ArrayList(countOfActions);
-// Send data in cellblocks. The call to buildNoDataMultiRequest will 
skip RowMutations.
-// They have already been handled above. Guess at count of cells
-regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, actions, cells,
-  regionActionBuilder, actionBuilder, mutationBuilder);
-  } else {
-regionActionBuilder = RequestConverter.buil

hbase git commit: HBASE-18522 Add RowMutations support to Batch

2017-08-14 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 043211760 -> c6f57e0f3


HBASE-18522 Add RowMutations support to Batch


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c6f57e0f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c6f57e0f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c6f57e0f

Branch: refs/heads/branch-1.4
Commit: c6f57e0f382e9dcef48f05da087d12eb0e47e9ad
Parents: 0432117
Author: Jerry He 
Authored: Sun Aug 13 18:23:49 2017 -0700
Committer: Jerry He 
Committed: Mon Aug 14 09:18:41 2017 -0700

--
 .../hbase/client/MultiServerCallable.java   | 66 +++-
 .../org/apache/hadoop/hbase/client/Table.java   |  6 +-
 .../hadoop/hbase/protobuf/RequestConverter.java |  6 +-
 .../hbase/protobuf/ResponseConverter.java   | 35 ++-
 .../hbase/client/TestFromClientSide3.java   | 46 ++
 .../hadoop/hbase/client/TestMultiParallel.java  | 49 ---
 6 files changed, 178 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c6f57e0f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index 42c63eb..b2ea941 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -97,12 +98,21 @@ class MultiServerCallable extends 
PayloadCarryingServerCallable cells = null;
-// The multi object is a list of Actions by region.  Iterate by region.
+
+// Pre-size. Presume at least a KV per Action. There are likely more.
+List cells =
+(this.cellBlock ? new ArrayList(countOfActions) : null);
+
 long nonceGroup = multiAction.getNonceGroup();
 if (nonceGroup != HConstants.NO_NONCE) {
   multiRequestBuilder.setNonceGroup(nonceGroup);
 }
+// Index to track RegionAction within the MultiRequest
+int regionActionIndex = -1;
+// Map from a created RegionAction for a RowMutations to the original 
index within
+// its original list of actions
+Map rowMutationsIndexMap = new HashMap<>();
+// The multi object is a list of Actions by region.  Iterate by region.
 for (Map.Entry>> e: 
this.multiAction.actions.entrySet()) {
   final byte [] regionName = e.getKey();
   final List> actions = e.getValue();
@@ -110,19 +120,46 @@ class MultiServerCallable extends 
PayloadCarryingServerCallable action : actions) {
+Row row = action.getAction();
+// Row Mutations are a set of Puts and/or Deletes all to be applied 
atomically
+// on the one row. We do separate RegionAction for each RowMutations.
+// We maintain a map to keep track of this RegionAction and the 
original Action index.
+if (row instanceof RowMutations) {
+  RowMutations rms = (RowMutations)row;
+  if (this.cellBlock) {
+// Build a multi request absent its Cell payload. Send data in 
cellblocks.
+regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, rms, cells,
+  regionActionBuilder, actionBuilder, mutationBuilder);
+  } else {
+regionActionBuilder = 
RequestConverter.buildRegionAction(regionName, rms);
+  }
+  regionActionBuilder.setAtomic(true);
+  multiRequestBuilder.addRegionAction(regionActionBuilder.build());
+  regionActionIndex++;
+  rowMutationsIndexMap.put(regionActionIndex, 
action.getOriginalIndex());
+  rowMutations++;
+
+  regionActionBuilder.clear();
+  regionActionBuilder.setRegion(RequestConverter.buildRegionSpecifier(
+HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME, 
regionName) );
+}
+  }
 
-  if (this.cellBlock) {
-// Presize.  Presume at least a KV per Action.  There are likely more.
-if (cells == null) cells = new 
ArrayList(countOfActions);
-// Send data in cellblocks. The call to buildNoDataMultiRequest will 
skip RowMutations.
-// They have already been handled above. Guess at count of cells
-regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, actions, cells,
-  regionActionBuilder, actionBuilder, mutationBuilder);
-  } else {
-regionActionBuilder = RequestConverter.

[1/2] hbase git commit: Revert "HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271" Commit message references the wrong issue

2017-08-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 b4793a035 -> c20ce21fe


Revert "HBASE-18588 Verify we're using netty .so epolling on linux post 
HBASE-18271"
Commit message references the wrong issue

This reverts commit b4793a03554c08a9e33b76457a1c1fe347aa0f2f.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c8d56bb1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c8d56bb1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c8d56bb1

Branch: refs/heads/branch-2
Commit: c8d56bb13e68680613121df5e5de4e4d197211e2
Parents: b4793a0
Author: Michael Stack 
Authored: Mon Aug 14 09:12:14 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:12:14 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 -
 1 file changed, 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c8d56bb1/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index f9400b3..fff02f8 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,23 +192,6 @@
 
   
 
-
-  
-
-  com.google.code.maven-replacer-plugin
-
-replacer
-[1.5.3,)
-
-  replace
-
-  
-  
-
- false
-
-  
-
   
 
   



[2/2] hbase git commit: HBASE-18593 Tell m2eclipse what to do w/ replacer plugin

2017-08-14 Thread stack
HBASE-18593 Tell m2eclipse what to do w/ replacer plugin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c20ce21f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c20ce21f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c20ce21f

Branch: refs/heads/branch-2
Commit: c20ce21fe800289ed902cacbf3d72ad614264442
Parents: c8d56bb
Author: Michael Stack 
Authored: Mon Aug 14 09:13:27 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:14:05 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c20ce21f/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index fff02f8..f9400b3 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,6 +192,23 @@
 
   
 
+
+  
+
+  com.google.code.maven-replacer-plugin
+
+replacer
+[1.5.3,)
+
+  replace
+
+  
+  
+
+ false
+
+  
+
   
 
   



[2/2] hbase git commit: HBASE-18593 Tell m2eclipse what to do w/ replacer plugin

2017-08-14 Thread stack
HBASE-18593 Tell m2eclipse what to do w/ replacer plugin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bd400730
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bd400730
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bd400730

Branch: refs/heads/master
Commit: bd40073094b248f74ac9a3c0fff7ef6668265feb
Parents: 424dff2
Author: Michael Stack 
Authored: Mon Aug 14 09:13:27 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:13:27 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bd400730/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index b28c03e..4c72eca 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,6 +192,23 @@
 
   
 
+
+  
+
+  com.google.code.maven-replacer-plugin
+
+replacer
+[1.5.3,)
+
+  replace
+
+  
+  
+
+ false
+
+  
+
   
 
   



[1/2] hbase git commit: Revert "HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271" References the wrong JIRA

2017-08-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master ddbaf56ca -> bd4007309


Revert "HBASE-18588 Verify we're using netty .so epolling on linux post 
HBASE-18271"
References the wrong JIRA

This reverts commit ddbaf56ca8c712dc44608d3323280f578c56aed2.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/424dff20
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/424dff20
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/424dff20

Branch: refs/heads/master
Commit: 424dff20607577901c06cb40b1293ea5051ec5c5
Parents: ddbaf56
Author: Michael Stack 
Authored: Mon Aug 14 09:12:51 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:12:51 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 -
 1 file changed, 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/424dff20/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index 4c72eca..b28c03e 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,23 +192,6 @@
 
   
 
-
-  
-
-  com.google.code.maven-replacer-plugin
-
-replacer
-[1.5.3,)
-
-  replace
-
-  
-  
-
- false
-
-  
-
   
 
   



hbase git commit: HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271

2017-08-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 fffe4e278 -> b4793a035


HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b4793a03
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b4793a03
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b4793a03

Branch: refs/heads/branch-2
Commit: b4793a03554c08a9e33b76457a1c1fe347aa0f2f
Parents: fffe4e2
Author: Michael Stack 
Authored: Mon Aug 14 09:09:56 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:10:55 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b4793a03/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index fff02f8..f9400b3 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,6 +192,23 @@
 
   
 
+
+  
+
+  com.google.code.maven-replacer-plugin
+
+replacer
+[1.5.3,)
+
+  replace
+
+  
+  
+
+ false
+
+  
+
   
 
   


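The pom diff above reaches this archive with its XML elements stripped, so only the plugin coordinates and values survive. As a standalone, hedged illustration of what "using netty .so epolling" means at runtime (the commit itself only touches the pom), the sketch below probes Netty's native epoll transport directly. It assumes netty-transport-native-epoll is on the classpath; inside HBase 2.x the same classes are used under the relocated org.apache.hadoop.hbase.shaded.io.netty prefix.

import io.netty.channel.epoll.Epoll;

public class EpollProbe {
  public static void main(String[] args) {
    // True only on Linux when the native epoll .so was loaded successfully.
    if (Epoll.isAvailable()) {
      System.out.println("Native epoll transport is available.");
    } else {
      System.out.println("Falling back to NIO: " + Epoll.unavailabilityCause());
    }
  }
}
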

hbase git commit: HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271

2017-08-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master e2b797be3 -> ddbaf56ca


HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ddbaf56c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ddbaf56c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ddbaf56c

Branch: refs/heads/master
Commit: ddbaf56ca8c712dc44608d3323280f578c56aed2
Parents: e2b797b
Author: Michael Stack 
Authored: Mon Aug 14 09:09:56 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:09:56 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ddbaf56c/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index b28c03e..4c72eca 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,6 +192,23 @@
 
   
 
+
+  
+
+  com.google.code.maven-replacer-plugin
+
+replacer
+[1.5.3,)
+
+  replace
+
+  
+  
+
+ false
+
+  
+
   
 
   



[4/4] hbase git commit: HBASE-18520 Add jmx value to determine true Master Start time; amendment

2017-08-14 Thread apurtell
HBASE-18520 Add jmx value to determine true Master Start time; amendment

This is to determine how long it took in total for the master to start and 
finish initializing.

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04321176
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04321176
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04321176

Branch: refs/heads/branch-1.4
Commit: 043211760394f6e5885bbb199f632b7be4fee9cd
Parents: 16223bc
Author: Zach York 
Authored: Thu Jun 15 17:10:07 2017 -0700
Committer: Andrew Purtell 
Committed: Mon Aug 14 08:43:13 2017 -0700

--
 .../java/org/apache/hadoop/hbase/master/MetricsMasterWrapper.java | 3 +++
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java | 3 ++-
 2 files changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/04321176/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterWrapper.java
index 812cad9..6938e87 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterWrapper.java
@@ -125,5 +125,8 @@ public interface MetricsMasterWrapper {
   /**
* Get the time in Millis when the master finished initializing/becoming the 
active master
*/
+  /**
+   * Get the time in Millis when the master finished initializing/becoming the 
active master
+   */
   long getMasterInitializationTime();
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/04321176/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 950ea4a..5b1aed6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -894,7 +894,8 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
 }
 
 status.markComplete("Initialization successful");
-LOG.info("Master has completed initialization");
+LOG.info(String.format("Master has completed initialization %.3fsec",
+   (System.currentTimeMillis() - masterActiveTime) / 1000.0f));
 this.masterFinishedInitializationTime = System.currentTimeMillis();
 configurationManager.registerObserver(this.balancer);
 


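The amended log line above reports startup duration by subtracting masterActiveTime from the current wall clock and formatting the result with "%.3fsec". A minimal sketch of the same pattern, with Thread.sleep() standing in for the real initialization work:

public class ElapsedLogFormat {
  public static void main(String[] args) throws InterruptedException {
    long startMillis = System.currentTimeMillis();  // analogous to masterActiveTime
    Thread.sleep(1234);                             // stand-in for startup work
    double elapsedSec = (System.currentTimeMillis() - startMillis) / 1000.0f;
    // Same "%.3fsec" formatting as the amended HMaster log message.
    System.out.println(String.format("Master has completed initialization %.3fsec", elapsedSec));
  }
}
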

[1/4] hbase git commit: HBASE-18387: [Thrift] Make principal configurable in DemoClient.java

2017-08-14 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 2523e716f -> 043211760


HBASE-18387: [Thrift] Make principal configurable in DemoClient.java

Added an optional (fourth) parameter, "server-principal".
The solution is backward compatible: if the parameter is not given, "hbase" is used
as the default value.
If the third parameter is skipped, the fourth cannot be set.

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2d06c1f7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2d06c1f7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2d06c1f7

Branch: refs/heads/branch-1.4
Commit: 2d06c1f7ba65f5a153adc726cc9c5cd9805bac5d
Parents: 2523e71
Author: Tamas Penzes 
Authored: Tue Aug 8 13:45:09 2017 +0200
Committer: Andrew Purtell 
Committed: Mon Aug 14 08:41:40 2017 -0700

--
 hbase-examples/README.txt|  3 ++-
 .../org/apache/hadoop/hbase/thrift/DemoClient.java   | 15 ---
 2 files changed, 14 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2d06c1f7/hbase-examples/README.txt
--
diff --git a/hbase-examples/README.txt b/hbase-examples/README.txt
index 6578bb4..d2ddcc7 100644
--- a/hbase-examples/README.txt
+++ b/hbase-examples/README.txt
@@ -28,7 +28,8 @@ Example code.
   2. If HBase server is not secure, or authentication is not enabled for 
the Thrift server, execute:
   {java -cp hbase-examples-[VERSION].jar:${HBASE_EXAMPLE_CLASSPATH} 
org.apache.hadoop.hbase.thrift.DemoClient  }
   3. If HBase server is secure, and authentication is enabled for the 
Thrift server, run kinit at first, then execute:
-  {java -cp hbase-examples-[VERSION].jar:${HBASE_EXAMPLE_CLASSPATH} 
org.apache.hadoop.hbase.thrift.DemoClient   true}
+  {java -cp hbase-examples-[VERSION].jar:${HBASE_EXAMPLE_CLASSPATH} 
org.apache.hadoop.hbase.thrift.DemoClient   true }
+   should only be specified when the client connects to 
a secure cluster. Its default value is "hbase".
   4. Here is a lazy example that just pulls in all hbase dependency jars 
and that goes against default location on localhost.
   It should work with a standalone hbase instance started by doing 
./bin/start-hbase.sh:
   {java -cp 
./hbase-examples/target/hbase-examples-2.0.0-SNAPSHOT.jar:`./bin/hbase 
classpath` org.apache.hadoop.hbase.thrift.DemoClient localhost 9090}

http://git-wip-us.apache.org/repos/asf/hbase/blob/2d06c1f7/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
--
diff --git 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index 64adc93..e5400f3 100644
--- 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -60,13 +60,14 @@ public class DemoClient {
 CharsetDecoder decoder = null;
 
 private static boolean secure = false;
+private static String serverPrincipal = "hbase";
 
 public static void main(String[] args) throws Exception {
 
-if (args.length < 2 || args.length > 3) {
+if (args.length < 2 || args.length > 4 || (args.length > 2 && 
!isBoolean(args[2]))) {
 
 System.out.println("Invalid arguments!");
-System.out.println("Usage: DemoClient host port [secure=false]");
+System.out.println("Usage: DemoClient host port [secure=false 
[server-principal=hbase] ]");
 
 System.exit(-1);
 }
@@ -77,6 +78,10 @@ public class DemoClient {
   secure = Boolean.parseBoolean(args[2]);
 }
 
+if (args.length == 4) {
+  serverPrincipal = args[3];
+}
+
 final DemoClient client = new DemoClient();
 Subject.doAs(getSubject(),
   new PrivilegedExceptionAction() {
@@ -88,6 +93,10 @@ public class DemoClient {
   });
 }
 
+private static boolean isBoolean(String s){
+  return Boolean.TRUE.toString().equalsIgnoreCase(s) || 
Boolean.FALSE.toString().equalsIgnoreCase(s);
+}
+
 DemoClient() {
 decoder = Charset.forName("UTF-8").newDecoder();
 }
@@ -123,7 +132,7 @@ public class DemoClient {
* The HBase cluster must be secure, allow proxy user.
*/
   transport = new TSaslClientTransport("GSSAPI", null,
-"hbase", // Thrift server user name, should be an authorized proxy 
user.
+serverPrincipal, // Thrift server user name, should be an 
authorized proxy user.
 host, // Thrift server domain
 saslProperties, nul

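The argument handling above keeps the old two- and three-argument invocations of DemoClient working while allowing an optional principal as a fourth argument. A compact, self-contained sketch of the same backward-compatible parsing (the class name and printed output are illustrative, not the DemoClient itself):

public class OptionalArgsSketch {
  public static void main(String[] args) {
    if (args.length < 2 || args.length > 4 || (args.length > 2 && !isBoolean(args[2]))) {
      System.out.println("Usage: OptionalArgsSketch host port [secure=false [server-principal=hbase]]");
      System.exit(-1);
    }
    String host = args[0];
    int port = Integer.parseInt(args[1]);
    boolean secure = args.length >= 3 && Boolean.parseBoolean(args[2]);
    // The fourth argument can only be supplied when the third one is present.
    String serverPrincipal = (args.length == 4) ? args[3] : "hbase";
    System.out.println(host + ":" + port + " secure=" + secure + " principal=" + serverPrincipal);
  }

  private static boolean isBoolean(String s) {
    return "true".equalsIgnoreCase(s) || "false".equalsIgnoreCase(s);
  }
}
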
[2/4] hbase git commit: HBASE-18469 Correct RegionServer metric of totalRequestCount

2017-08-14 Thread apurtell
HBASE-18469 Correct RegionServer metric of totalRequestCount


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c9f38bc7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c9f38bc7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c9f38bc7

Branch: refs/heads/branch-1.4
Commit: c9f38bc7f31057a723effd5b59d36760623cffb9
Parents: 2d06c1f7
Author: Yu Li 
Authored: Fri Aug 11 14:13:18 2017 +0800
Committer: Andrew Purtell 
Committed: Mon Aug 14 08:41:46 2017 -0700

--
 .../regionserver/MetricsRegionServerSource.java |  3 ++
 .../MetricsRegionServerWrapper.java |  2 +
 .../MetricsRegionServerSourceImpl.java  |  2 +
 .../MetricsRegionServerWrapperImpl.java |  5 +++
 .../hbase/regionserver/RSRpcServices.java   | 15 +--
 .../MetricsRegionServerWrapperStub.java |  5 +++
 .../regionserver/TestMetricsRegionServer.java   |  4 ++
 .../regionserver/TestRegionServerMetrics.java   | 41 ++--
 8 files changed, 71 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c9f38bc7/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
index 06bdee6..3ac678e 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
@@ -244,6 +244,9 @@ public interface MetricsRegionServerSource extends 
BaseSource, JvmPauseMonitorSo
   String TOTAL_REQUEST_COUNT = "totalRequestCount";
   String TOTAL_REQUEST_COUNT_DESC =
   "Total number of requests this RegionServer has answered.";
+  String TOTAL_ROW_ACTION_REQUEST_COUNT = "totalRowActionRequestCount";
+  String TOTAL_ROW_ACTION_REQUEST_COUNT_DESC =
+  "Total number of region requests this RegionServer has answered, count 
by row-level action";
   String READ_REQUEST_COUNT = "readRequestCount";
   String READ_REQUEST_COUNT_DESC =
   "Number of read requests this region server has answered.";

http://git-wip-us.apache.org/repos/asf/hbase/blob/c9f38bc7/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
index 7232063..7d7f66d 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
@@ -367,4 +367,6 @@ public interface MetricsRegionServerWrapper {
   long getDeleteFamilyBloomHitCount();
 
   long getTrailerHitCount();
+
+  long getTotalRowActionRequestCount();
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c9f38bc7/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java
index 9b7f80a..cec122b 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java
@@ -354,6 +354,8 @@ public class MetricsRegionServerSourceImpl
   .addGauge(Interns.info(AVERAGE_REGION_SIZE, 
AVERAGE_REGION_SIZE_DESC), rsWrap.getAverageRegionSize())
   .addCounter(Interns.info(TOTAL_REQUEST_COUNT, 
TOTAL_REQUEST_COUNT_DESC),
   rsWrap.getTotalRequestCount())
+  .addCounter(Interns.info(TOTAL_ROW_ACTION_REQUEST_COUNT, 
TOTAL_ROW_ACTION_REQUEST_COUNT_DESC),
+  rsWrap.getTotalRowActionRequestCount())
   .addCounter(Interns.info(READ_REQUEST_COUNT, 
READ_REQUEST_COUNT_DESC),
   rsWrap.getReadRequestsCount())
   .addCounter(Interns.info(WRITE_REQUEST_COUNT, 
WRITE_REQUEST_COUNT_DESC),

http://git-wip-us.apache.org/repos/asf/hbase/blob/c9f38bc7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSe

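The new totalRowActionRequestCount is described above as counting row-level actions, so a single multi RPC that touches many rows advances it many times while totalRequestCount moves once. A stripped-down illustration of that distinction (plain counters, not the actual RegionServer metrics plumbing):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.LongAdder;

public class RequestCountSketch {
  private final LongAdder totalRequestCount = new LongAdder();           // one per RPC
  private final LongAdder totalRowActionRequestCount = new LongAdder();  // one per row-level action

  void handleMulti(List<String> rowActions) {
    totalRequestCount.increment();                     // the multi RPC itself
    totalRowActionRequestCount.add(rowActions.size()); // every row it touches
  }

  public static void main(String[] args) {
    RequestCountSketch metrics = new RequestCountSketch();
    metrics.handleMulti(Arrays.asList("row1", "row2", "row3"));
    System.out.println("requests=" + metrics.totalRequestCount.sum()
        + " rowActions=" + metrics.totalRowActionRequestCount.sum());  // requests=1 rowActions=3
  }
}
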
[3/4] hbase git commit: Revert "HBASE-18520 Add jmx value to determine true Master Start time"

2017-08-14 Thread apurtell
Revert "HBASE-18520 Add jmx value to determine true Master Start time"

This reverts commit 1ba949e0307e4fda3f9234bc436490fb33155bea.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/16223bcb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/16223bcb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/16223bcb

Branch: refs/heads/branch-1.4
Commit: 16223bcb5cdd813b917b5475014b7ad209388e66
Parents: c9f38bc
Author: Michael Stack 
Authored: Tue Aug 8 15:43:58 2017 +0800
Committer: Andrew Purtell 
Committed: Mon Aug 14 08:43:09 2017 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/16223bcb/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 5b1aed6..950ea4a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -894,8 +894,7 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
 }
 
 status.markComplete("Initialization successful");
-LOG.info(String.format("Master has completed initialization %.3fsec",
-   (System.currentTimeMillis() - masterActiveTime) / 1000.0f));
+LOG.info("Master has completed initialization");
 this.masterFinishedInitializationTime = System.currentTimeMillis();
 configurationManager.registerObserver(this.balancer);
 



[05/51] [partial] hbase-site git commit: Published site at .

2017-08-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
index 14fd7e2..7c967d0 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
@@ -146,7 +146,7 @@
 
 
 
-NettyRpcServerRequestDecoder(io.netty.channel.group.ChannelGroup allChannels,
+NettyRpcServerRequestDecoder(org.apache.hadoop.hbase.shaded.io.netty.channel.group.ChannelGroup allChannels,
 MetricsHBaseServer metrics) 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
index 76298dc..b8a90bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
@@ -120,8 +120,8 @@
 NettyRpcServerPreambleHandler(NettyRpcServer rpcServer) 
 
 
-NettyServerRpcConnection(NettyRpcServer rpcServer,
-
io.netty.channel.Channel channel) 
+NettyServerRpcConnection(NettyRpcServer rpcServer,
+
org.apache.hadoop.hbase.shaded.io.netty.channel.Channel channel) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 08990df..d9fa725 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -117,25 +117,25 @@
 org.apache.hadoop.hbase.ipc.CellBlockBuilder
 org.apache.hadoop.hbase.ipc.CellBlockBuilder.ByteBufferOutputStreamSupplier 
(implements org.apache.hadoop.hbase.ipc.CellBlockBuilder.OutputStreamSupplier)
 org.apache.hadoop.hbase.ipc.CellBlockBuilder.ByteBufOutputStreamSupplier 
(implements org.apache.hadoop.hbase.ipc.CellBlockBuilder.OutputStreamSupplier)
-io.netty.channel.ChannelHandlerAdapter (implements 
io.netty.channel.ChannelHandler)
+org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerAdapter
 (implements org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler)
 
-io.netty.channel.ChannelInboundHandlerAdapter (implements 
io.netty.channel.ChannelInboundHandler)
+org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandlerAdapter
 (implements 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandler)
 
-io.netty.channel.ChannelDuplexHandler (implements 
io.netty.channel.ChannelOutboundHandler)
+org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelDuplexHandler
 (implements 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOutboundHandler)
 
 org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler
 org.apache.hadoop.hbase.ipc.NettyRpcDuplexHandler
 
 
 org.apache.hadoop.hbase.ipc.NettyRpcServerRequestDecoder
-io.netty.channel.SimpleChannelInboundHandler
+org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler
 
 org.apache.hadoop.hbase.ipc.NettyRpcServerPreambleHandler
 
 
 
 
-io.netty.channel.ChannelOutboundHandlerAdapter (implements 
io.netty.channel.ChannelOutboundHandler)
+org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOutboundHandlerAdapter
 (implements 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOutboundHandler)
 
 org.apache.hadoop.hbase.ipc.NettyRpcServerResponseEncoder
 
@@ -342,8 +342,8 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction
-org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage
 org.apache.hadoop.hbase.ipc.CallEvent.Type
+org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/LoadIncrementalHFiles.html

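The package-tree diff above shows the generated docs picking up the relocation of Netty from io.netty to org.apache.hadoop.hbase.shaded.io.netty (HBASE-18271). A small hedged sketch that only checks whether the relocated name resolves at runtime; it assumes a shaded HBase jar on the classpath, and the particular class is just an example:

public class ShadedNettyProbe {
  public static void main(String[] args) {
    String relocated = "org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler";
    try {
      // Loading by name keeps this sketch free of a compile-time Netty dependency.
      Class<?> clazz = Class.forName(relocated);
      System.out.println("Resolved relocated Netty type: " + clazz.getName());
    } catch (ClassNotFoundException e) {
      System.out.println("Relocated Netty classes not on the classpath: " + e);
    }
  }
}
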
[42/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 7ef078b..eca8090 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -286,10 +286,10 @@
  Warnings
  Errors
 
-2024
+2026
 0
 0
-12796
+12844
 
 Files
 
@@ -362,7 +362,7 @@
 org/apache/hadoop/hbase/ClusterStatus.java
 0
 0
-1
+3
 
 org/apache/hadoop/hbase/CompatibilityFactory.java
 0
@@ -594,116 +594,136 @@
 0
 3
 
+org/apache/hadoop/hbase/backup/BackupMergeJob.java
+0
+0
+1
+
 org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/BackupTableInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/FailedArchiveException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/HBackupFileSystem.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/HFileArchiver.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/backup/LogUtils.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/RestoreDriver.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/RestoreJob.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/RestoreRequest.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
 0
 0
-12
-
+13
+
 org/apache/hadoop/hbase/backup/impl/BackupCommands.java
 0
 0
-60
-
+58
+
 org/apache/hadoop/hbase/backup/impl/BackupManager.java
 0
 0
-5
-
+4
+
 org/apache/hadoop/hbase/backup/impl/BackupManifest.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
 0
 0
-41
-
+25
+
 org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
 0
 0
 5
+
+org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java
+0
+0
+1
 
 org/apache/hadoop/hbase/backup/impl/TableBackupClient.java
 0
 0
 13
 
+org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java
+0
+0
+1
+
+org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
+0
+0
+3
+
 org/apache/hadoop/hbase/backup/regionserver/LogRollBackupSubprocedure.java
 0
 0
@@ -712,7 +732,7 @@
 org/apache/hadoop/hbase/backup/util/BackupUtils.java
 0
 0
-3
+4
 
 org/apache/hadoop/hbase/backup/util/RestoreTool.java
 0
@@ -777,7 +797,7 @@
 org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java
 0
 0
-3
+4
 
 org/apache/hadoop/hbase/client/AsyncClientScanner.java
 0
@@ -804,25 +824,35 @@
 0
 8
 
-org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
+org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.java
 0
 0
 1
 
+org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
+0
+0
+1
+
 org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/client/AsyncProcessTask.java
 0
 0
 1
+
+org/apache/hadoop/hbase/client/AsyncRegionLocator.java
+0
+0
+1
 
 org/apache/hadoop/hbase/client/AsyncRequestFuture.java
 0
@@ -837,1672 +867,1682 @@
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java
 0
 0
-1
+2
 
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java
 0
 0
-2
+3
 
 org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
 0
 0
 2
 
-org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.java
+org/apache/hadoop/hbase/client/AsyncServerRequestRpcRetryingCaller.java
 0
 0
 1
 
+org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.java
+0
+0
+2
+
 org/apache/hadoop/hbase/client/AsyncTableBase.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncTableImpl.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/AsyncTableResultScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BatchErrors.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BufferedMutator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientIdGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ClientServiceCallable.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClusterStatusListener.java
 0
 0
-1
-
+2
+
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java

[13/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 521ae2d..ce60328 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
 
109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
 
109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -623,151 +623,155 @@ (RawAsyncHBaseAdmin method-summary table around getClusterStatus();
the generated-HTML rows are condensed to one entry per method)
+  CompletableFuture<ClusterStatus>    getClusterStatus(ClusterStatus.Options options)
   CompletableFuture<CompactionState>  getCompactionState(TableName tableName)
       Get the current compaction state of a table.
   CompletableFuture<CompactionState>  getCompactionStateForRegion(byte[] regionName)
       Get the current compaction state of region.
   CompletableFuture<Optional<Long>>   getLastMajorCompactionTimestamp(TableName tableName)
       Get the timestamp of the last major compaction for the passed table.
   (the remaining rows only shift their anchor ids; the fragment is truncated here and a short
   usage sketch follows)
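For readers following the condensed rows above, a minimal hedged sketch of calling the new
asynchronous overload (the CompletableFuture type parameter and Options.defaultOptions() are
taken from elsewhere in this same changeset; obtaining the AsyncAdmin instance is outside this
diff, so it is passed in as a parameter, and the class name is purely illustrative):

    import java.util.concurrent.CompletableFuture;
    import org.apache.hadoop.hbase.ClusterStatus;
    import org.apache.hadoop.hbase.client.AsyncAdmin;

    public final class ClusterStatusAsyncExample {
      // Fetch the cluster status asynchronously and print the live region server count.
      static CompletableFuture<Void> printLiveServerCount(AsyncAdmin admin) {
        return admin.getClusterStatus(ClusterStatus.Options.defaultOptions())
            .thenAccept(status ->
                System.out.println("live region servers: " + status.getServersSize()));
      }
    }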

[45/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
index 324fa63..f47225a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
@@ -39,1926 +39,1926 @@
 (Line-numbered source view of Admin.java, condensed. The context lines keep the imports of
 org.apache.hadoop.conf.Configuration, Abortable and ClusterStatus; the removed block covers
 the old rendering of the remaining imports -- HColumnDescriptor, HRegionInfo,
 HTableDescriptor, NamespaceDescriptor, NamespaceNotFoundException, ProcedureInfo, RegionLoad,
 ServerName, TableExistsException, TableName, TableNotFoundException, InterfaceAudience,
 TableCFs, SecurityCapability, CoprocessorRpcChannel, LockInfo, QuotaFilter, QuotaRetriever,
 QuotaSettings, FailedLogCloseException, ReplicationException, ReplicationPeerConfig,
 ReplicationPeerDescription, HBaseSnapshotException, RestoreSnapshotException,
 SnapshotCreationException, UnknownSnapshotException and Pair -- followed by the interface
 Javadoc ("The administrative API for HBase. Obtain an instance from {@link
 Connection#getAdmin()} and call {@link #close()} afterwards. Admin can be used to create,
 drop, list, enable and disable tables, add and drop table column families and other
 administrative operations.", @see ConnectionFactory / Connection / Table, @since 0.99.0) and
 the first declarations: getOperationTimeout(), abort(String, Throwable), isAborted(),
 getConnection(), tableExists(TableName), the deprecated listTables() variants and
 listTableDescriptors(). The replacement rendering is truncated out of this message.)


[22/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Admin.html 
b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
index acc88a2..88b7fa7 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":38,"i18":38,"i19":38,"i20":38,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":50,"i33":50,"i34":50,"i35":6,"i36":6,"i37":6,"i38":50,"i39":6,"i40":38,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":38,"i52":38,"i53":6,"i54":6,"i55":18,"i56":6,"i57":6,"i58":6,"i59":38,"i60":38,"i61":6,"i62":6,"i63":18,"i64":6,"i65":6,"i66":6,"i67":38,"i68":38,"i69":6,"i70":6,"i71":6,"i72":6,"i73":38,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":18,"i92":6,"i93":38,"i94":38,"i95":38,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":18,"i116":18,"i117":18,"i
 
118":6,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":38,"i130":6,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":38,"i137":38,"i138":38,"i139":38,"i140":38,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":38,"i150":6,"i151":6,"i152":38,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":38,"i159":6,"i160":38,"i161":6,"i162":6,"i163":6,"i164":6,"i165":18,"i166":18,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":38,"i189":38,"i190":6,"i191":6,"i192":6,"i193":6,"i194":6,"i195":6,"i196":6,"i197":6,"i198":6,"i199":6,"i200":18};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":38,"i18":38,"i19":38,"i20":38,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":50,"i33":50,"i34":50,"i35":6,"i36":6,"i37":6,"i38":50,"i39":6,"i40":38,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":38,"i52":38,"i53":6,"i54":6,"i55":18,"i56":6,"i57":6,"i58":6,"i59":38,"i60":38,"i61":6,"i62":6,"i63":18,"i64":6,"i65":6,"i66":6,"i67":38,"i68":38,"i69":6,"i70":6,"i71":6,"i72":6,"i73":38,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":18,"i93":6,"i94":38,"i95":38,"i96":38,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":18,"i117":18,"i1
 
18":18,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":38,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":6,"i137":38,"i138":38,"i139":38,"i140":38,"i141":38,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":6,"i150":38,"i151":6,"i152":6,"i153":38,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":38,"i160":6,"i161":38,"i162":6,"i163":6,"i164":6,"i165":6,"i166":18,"i167":18,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":6,"i189":38,"i190":38,"i191":6,"i192":6,"i193":6,"i194":6,"i195":6,"i196":6,"i197":6,"i198":6,"i199":6,"i200":6,"i201":18};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface Admin
+public interface Admin
 extends Abortable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 The administrative API for HBase. Obtain an instance from 
an Connection.getAdmin()
 and
  call close()
 afterwards.
@@ -705,33 +705,39 @@ (Admin method-summary table around getClusterStatus(); generated-HTML
rows condensed)
+  ClusterStatus    getClusterStatus(ClusterStatus.Options options)
+      Get cluster status with options to filter out unwanted status.
   CompactionState  getCompactionState(TableName tableName)
       Get the current compaction state of a table.
   CompactionState  getCompactio... (the fragment is truncated here; the remaining rows only
   shift their anchor ids, and a short usage sketch follows)
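As a synchronous companion to the new Admin overload condensed above, a minimal hedged sketch
(the Connection handling is illustrative, Options.defaultOptions() comes from the
ClusterStatus Javadoc later in this changeset, and the class name is made up for the example):

    import java.io.IOException;
    import org.apache.hadoop.hbase.ClusterStatus;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;

    public final class ClusterStatusExample {
      // Fetch the full cluster status through the new Options-based overload.
      static ClusterStatus fetchStatus(Connection connection) throws IOException {
        try (Admin admin = connection.getAdmin()) {
          return admin.getClusterStatus(ClusterStatus.Options.defaultOptions());
        }
      }
    }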

[25/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index dda7356..46a7e99 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -212,10 +212,14 @@ / @@ -233,8 +237,16 @@ / @@ -243,6 +255,10 @@ (fields of type ServerName;
generated-HTML rows condensed)
+  private ServerName                  ClusterStatus.Builder.master
   private ServerName                  SplitLogTask.originServer
   private ServerName                  HRegionLocation.serverName
+  private Collection<ServerName>      ClusterStatus.Builder.backupMasters
   private Collection<ServerName>      ClusterStatus.deadServers
+  private Collection<ServerName>      ClusterStatus.Builder.deadServers
   static  List<ServerName>            ServerName.EMPTY_SERVER_LIST
   private Map<ServerName,ServerLoad>  ClusterStatus.liveServers
+  private Map<ServerName,ServerLoad>  ClusterStatus.Builder.liveServers

@@ -463,6 +479,10 @@ .. @@ -508,6 +528,18 @@ (methods taking ServerName arguments)
+  ClusterStatus.Builder  ClusterStatus.Builder.setMaster(ServerName master)
   static void            MetaTableAccessor.splitRegion(Connection connection, HRegionInfo parent, ...)
                              Splits the region into two in an atomic operation.
   private static void    MetaTableAccessor.updateLocation(Connection connection, HRegionInfo regionInfo, ...)
                              Updates the location of the specified region to be the specified server.
   static void            MetaTableAccessor.updateRegionLocation(Connection connection, HRegionInfo regionInfo, ...)
   (package private) static List  MetaTableAccessor.getListOfHRegionInfos(List<Pair<HRegionInfo,ServerName>> pairs)
+  ClusterStatus.Builder  ClusterStatus.Builder.setBackupMasters(Collection<ServerName> backupMasters)
+  ClusterStatus.Builder  ClusterStatus.Builder.setDeadServers(Collection<ServerName> deadServers)
+  ClusterStatus.Builder  ClusterStatus.Builder.setLiveServers(Map<ServerName,ServerLoad> liveServers)

@@ -525,7 +557,12 @@ and @@ -581,7 +618,12 @@ (constructors with ServerName parameters)
   ClusterStatus(..., Collection<ServerName> backupMasters, List<...> rit,
                 String[] masterCoprocessors, Boolean balancerOn)
+      Deprecated. As of release 2.0.0, this will be removed in HBase 3.0.0
+      (https://issues.apache.org/jira/browse/HBASE-15511).
   Done(ServerName originServer, ...)
   (a second ClusterStatus constructor overload carries the same deprecation note; the fragment
   is truncated here)

[28/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/class-use/MapReduceHFileSplitterJob.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/class-use/MapReduceHFileSplitterJob.html
 
b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/class-use/MapReduceHFileSplitterJob.html
new file mode 100644
index 000..38939a8
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/class-use/MapReduceHFileSplitterJob.html
@@ -0,0 +1,125 @@
+ (new generated page, 125 lines; the standard Javadoc chrome -- doctype, scripts, top and
+ bottom navigation bars with Overview / Package / Class / Use / Tree / Deprecated / Index /
+ Help, Prev / Next, Frames / No Frames and All Classes links -- is omitted here, keeping only
+ the substantive content:)
+
+ Uses of Class org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob
+ (Apache HBase 3.0.0-SNAPSHOT API)
+
+ No usage of org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob
+
+ Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-frame.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-frame.html 
b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-frame.html
index afbda38..477fd79 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-frame.html
@@ -13,10 +13,11 @@
 
 Classes
 
-HFileSplitterJob
-HFileSplitterJob.HFileCellMapper
 MapReduceBackupCopyJob
 MapReduceBackupCopyJob.SnapshotCopy
+MapReduceBackupMergeJob
+MapReduceHFileSplitterJob
+MapReduceHFileSplitterJob.HFileCellMapper
 MapReduceRestoreJob
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-summary.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-summary.html
index 5d3ff01..47d8068 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-summary.html
@@ -83,36 +83,42 @@
 
 
 
-HFileSplitterJob
+MapReduceBackupCopyJob
 
-A tool to split HFiles into new region boundaries as a 
MapReduce job.
+Map-Reduce implementation of BackupCopyJob.
 
 
+
+MapReduceBackupCopyJob.SnapshotCopy
+ 
+
 
-HFileSplitterJob.HFileCellMapper
+MapReduceBackupMergeJob
 
-A mapper that just writes out cells.
+MapReduce implementation of BackupMergeJob
+ Must be initialized with configuration of a backup destination cluster
 
 
 
-MapReduceBackupCopyJob
+MapReduceHFileSplitterJob
 
-Map-Reduce implementation of BackupCopyJob.
+A tool to split HFiles into new region boundaries as a 
MapReduce job.
 
 
-
-MapReduceBackupCopyJob.SnapshotCopy
- 
-
 
+MapReduceHFileSplitterJob.HFileCellMapper
+
+A mapper that just writes out cells.
+
+
+
 MapReduceRestoreJob
 
 MapReduce implementation of RestoreJob
 
- For full backup restore, it runs HFileSplitterJob job 
and creates
+ For backup restore, it runs MapReduceHFileSplitterJob
 job and creates
  HFiles which are aligned with a region boundaries of a table being
- restored, for incremental backup restore it runs WALPlayer in
- bulk load mode (creates HFiles from WAL edits).
+ restored.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/mapred

[46/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
index 6d989d2..09abf56 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
@@ -55,293 +55,586 @@
 (Line-numbered source view of ClusterStatus.java; the generated markup is condensed here.
 The class Javadoc bullet list ends with "Regions in transition at master" and "The unique
 cluster ID"; the new revision then inserts the following paragraph ahead of the class
 declaration:)
+
+    {@link Options} provides a way to filter out information that is unwanted.
+    The following code retrieves all the cluster information:
+
+        // Original version still works
+        Admin admin = connection.getAdmin();
+        ClusterStatus status = admin.getClusterStatus();
+        // or below, a new version which has the same effect
+        ClusterStatus status = admin.getClusterStatus(Options.defaultOptions());
+
+    If information about dead servers and master coprocessors is unwanted, write it this way:
+
+        Admin admin = connection.getAdmin();
+        ClusterStatus status = admin.getClusterStatus(
+            Options.defaultOptions()
+                .excludeDeadServers()
+                .excludeMasterCoprocessors());
+
 (Both revisions then continue with the @InterfaceAudience.Public declaration "public class
 ClusterStatus extends VersionedWritable", the serialization VERSION constant (still 2, with
 the history: 0 = initial version, 1 = added cluster ID, 2 = added Map of ServerName to
 ServerLoad, 3 = added master and backupMasters), the private fields (hbaseVersion,
 liveServers, deadServers, master, backupMasters, intransition, clusterId, masterCoprocessors,
 balancerOn), the full-argument constructor, getDeadServerNames() -- which returns an empty
 list when deadServers is null -- getServersSize(), and the deprecated getDeadServers()
 accessor (use getDeadServersSize()); the fragment is truncated here. A short sketch of
 consuming a filtered status follows.)

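To complement the Javadoc condensed above, a small hedged sketch of consuming a filtered
status (getServersSize() and getDeadServerNames() are the accessors visible in this listing;
the class name and printed labels are illustrative):

    import java.io.IOException;
    import org.apache.hadoop.hbase.ClusterStatus;
    import org.apache.hadoop.hbase.client.Admin;

    public final class FilteredStatusExample {
      // Skip dead-server and master-coprocessor details, then read the basic counts.
      static void printSummary(Admin admin) throws IOException {
        ClusterStatus status = admin.getClusterStatus(
            ClusterStatus.Options.defaultOptions()
                .excludeDeadServers()
                .excludeMasterCoprocessors());
        System.out.println("region servers: " + status.getServersSize());
        // Per the listing above, getDeadServerNames() falls back to an empty collection
        // when the underlying list is absent.
        System.out.println("dead servers: " + status.getDeadServerNames().size());
      }
    }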
    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
    index 537fec1..85f16c4 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -625,15 +625,20 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 (constructors taking a TableDescriptor; generated-HTML rows condensed)
+  ImmutableHTableDescriptor(TableDescriptor desc)                    Deprecated.
   ModifyableTableDescriptor(TableDescriptor desc)
   ModifyableTableDescriptor(TableName name, TableDescriptor desc)    Deprecated.
   TableDescriptorBuilder(TableDescriptor desc)

@@ -675,7 +680,7 @@ .. @@ -781,7 +786,7 @@ (MasterObserver hooks: the TableDescriptor parameter
 is renamed from "hTableDescriptor" to "tableDescriptor" in postCloneSnapshot,
 postRestoreSnapshot, postSnapshot, preCloneSnapshot, preRestoreSnapshot and preSnapshot; each
 default method takes an ObserverContext, a SnapshotProtos.SnapshotDescription and the
 TableDescriptor)

@@ -978,6 +983,93 @@ (new table: methods in org.apache.hadoop.hbase.master with parameters of
 type TableDescriptor)
+  void  MasterCoprocessorHost.postCloneSnapshot(SnapshotProtos.SnapshotDescription snapshot,
+            TableDescriptor hTableDescriptor)
+  void  MasterCoprocessorHost.postCompletedCreateTableAction(TableDescriptor htd,
+            HRegionInfo[] regions, User user)
+  void  MasterCoprocessorHost.postCompletedModifyTableAction(TableName tableName,
+            TableDescriptor htd, User user)
+  void  MasterCoprocessorHost.postCreateTable(TableDescriptor htd, HRegionInfo[] regions)
+  void  MasterCoprocessorHost.postModifyTable(TableName tableName, TableDescriptor htd)
+  void  MasterCoprocessorHost.postRestoreSnapshot(... (the fragment is truncated here; a
   hedged observer sketch follows)
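A hedged observer sketch built from the preSnapshot signature shown above (the ObserverContext
type parameter, MasterCoprocessorEnvironment, is assumed because the generated page strips
generics; the class name is illustrative and how the observer is registered is outside this
fragment):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.MasterObserver;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;

    public class SnapshotLoggingObserver implements MasterObserver {
      @Override
      public void preSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx,
                              SnapshotProtos.SnapshotDescription snapshot,
                              TableDescriptor tableDescriptor) throws IOException {
        // Log which table is about to be snapshotted, using the renamed parameter.
        System.out.println("taking snapshot of " + tableDescriptor.getTableName());
      }
    }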

    [40/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/constant-values.html
    --
    diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
    index bb57548..fd61d3e 100644
    --- a/devapidocs/constant-values.html
    +++ b/devapidocs/constant-values.html
    @@ -3836,21 +3836,21 @@
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     date
    -"Sat Aug 12 14:41:08 UTC 2017"
    +"Mon Aug 14 14:40:45 UTC 2017"
     
     
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     revision
    -"173dce73471da005fb6780a7e7b65b43bad481e2"
    +"e2b797be390f05c55a490a64bc72e2d8c19fcbb7"
     
     
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     srcChecksum
    -"0b5fe4e50617b899e986f38527f3404c"
    +"5941cfcfb586bba33a288c7c32efc33d"
     
     
     
    @@ -4206,6 +4206,13 @@
     "hbase.backup.copy.class"
     
     
    +
    +
    +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    +HBASE_BACKUP_MERGE_IMPL_CLASS
    +"hbase.backup.merge.class"
    +
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    @@ -4387,48 +4394,55 @@
     "Incorrect usage"
     
     
    +
    +
    +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    +MERGE_CMD_USAGE
    +"Usage: hbase backup merge [backup_ids]\n  
    backup_ids  Comma separated list of backup image ids.\n"
    +
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     NO_ACTIVE_SESSION_FOUND
     "No active backup sessions found."
     
    -
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     NO_INFO_FOUND
     "No info was found for backup id: "
     
    -
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     PROGRESS_CMD_USAGE
     "Usage: hbase backup progress \n  
    backup_id   Backup image id (optional). If no id specified, the command 
    will show\n  progress for currently running backup 
    session."
     
    -
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     REPAIR_CMD_USAGE
     "Usage: hbase backup repair\n"
     
    -
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     SET_CMD_USAGE
     "Usage: hbase backup set COMMAND [name] [tables]\n  
    nameBackup set name\n  tables  Comma separated list of 
    tables.\nCOMMAND is one of:\n  add add tables to a set, create a 
    set if needed\n  remove  remove tables from a set\n  list
    list all backup sets in the system\n  describedescribe set\n  delete
      delete backup set\n"
     
    -
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     USAGE
    -"Usage: hbase backup COMMAND [command-specific 
    arguments]\nwhere COMMAND is one of:\n  create create a new backup image\n  
    delete delete an existing backup image\n  describe   show the detailed 
    information of a backup image\n  historyshow history of all successful 
    backups\n  progress   show the progress of the latest backup request\n  set 
       backup set management\n  repair repair backup system tableRun \'hbase 
    backup COMMAND -h\' to see help message for each command\n"
    +"Usage: hbase backup COMMAND [command-specific 
    arguments]\nwhere COMMAND is one of:\n  create create a new backup image\n  
    delete delete an existing backup image\n  describe   show the detailed 
    information of a backup image\n  historyshow history of all successful 
    backups\n  progress   show the progress of the latest backup request\n  set 
       backup set management\n  repair repair backup system table\n  merge  
    merge backup images\nRun \'hbase backup COMMAND -h\' to see help message for 
    each command\n"
     
    -
    +
     
     
     public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    @@ -4635,7 +4649,7 @@
     
     
     
    -org.apache.hadoop.hbase.backup.mapreduce.HFileSplitterJob 
    +org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob 
     
     Modifier and Typ
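The MERGE_CMD_USAGE and USAGE strings above document the new "merge" subcommand: per that
usage text it takes a comma-separated list of backup image ids, for example (with purely
hypothetical ids) "hbase backup merge backup_1502725200000,backup_1502811600000".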

    [03/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
     
    b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
    index b7e36e9..a0004a6 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
    @@ -248,7 +248,7 @@ extends 
     
     Methods inherited from class org.apache.hadoop.hbase.master.HMaster
- (old list) abort, abortProcedure, addColumn, addReplicationPeer, balance, balanceSwitch,
  canCreateBaseZNode, canUpdateTableDescriptor, ... getClusterStatus, ... through
  updateReplicationPeerConfig and waitForMasterActive
+ (new list) identical to the old one except that getClusterStatus now appears twice, once for
  getClusterStatus() and once for the new getClusterStatus(ClusterStatus.Options) overload
  added by this change; the + side is truncated in this message

    [19/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
    index 67131e4..d59138d 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public class AsyncHBaseAdmin
    +public class AsyncHBaseAdmin
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
     implements AsyncAdmin
     The implementation of AsyncAdmin.
@@ -418,113 +418,117 @@ (AsyncHBaseAdmin method-summary table around getClusterStatus();
generated-HTML rows condensed)
+  CompletableFuture<ClusterStatus>    getClusterStatus(ClusterStatus.Options options)
   CompletableFuture<CompactionState>  getCompactionState(TableName tableName)
       Get the current compaction state of a table.
   CompletableFuture<CompactionState>  getCompactionStateForRegion(byte[] regionName)
       Get the current compaction state of region.
   CompletableFuture<Optional<Long>>   getLastMajorCompactionTimestamp(TableName tableName)
       Get the timestamp of the last major compaction for the passed table.
   CompletableFuture<Optional<Long>>   getLastMajorCompactionTimestampForRegion(byte[] regionName)
   (the remaining rows only shift their anchor ids; the fragment is truncated here)

    [07/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
    index a39f56f..db18883 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
@@ -286,13 +286,13 @@ .. @@ -549,34 +549,34 @@ (FanOutOneBlockAsyncDFSOutputSaslHelper method
summary and detail; generated-HTML rows condensed -- the change replaces the unshaded netty
types with their org.apache.hadoop.hbase.shaded counterparts)
   private static void doSaslNegotiation(Configuration conf,
-      io.netty.channel.Channel channel,
+      org.apache.hadoop.hbase.shaded.io.netty.channel.Channel channel,
       int timeoutMs, String username, char[] password, Map<String,String> saslProps,
-      io.netty.util.concurrent.Promise<Void> saslPromise)
+      org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise<Void> saslPromise)

   (package private) static void trySaslNegotiate(Configuration conf,
-      io.netty.channel.Channel channel,
+      org.apache.hadoop.hbase.shaded.io.netty.channel.Channel channel,
       DatanodeInfo dnInfo, int timeoutMs, DFSClient client,
       Token<...> accessToken,
-      io.netty.util.concurrent.Promise<Void> saslPromise)
+      org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise<Void> saslPromise)

   private static Map<String,String> createSaslPropertiesForEncryption(String encryptionAlgorithm)
   (the method-detail section repeats the same signature change for doSaslNegotiation; the
   fragment is truncated here and an import-level sketch follows)

    [38/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/ClusterStatus.Builder.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/ClusterStatus.Builder.html 
    b/devapidocs/org/apache/hadoop/hbase/ClusterStatus.Builder.html
    new file mode 100644
    index 000..82f88dc
    --- /dev/null
    +++ b/devapidocs/org/apache/hadoop/hbase/ClusterStatus.Builder.html
    @@ -0,0 +1,536 @@
+ (new generated page, 536 lines; the standard Javadoc navigation chrome is omitted here and
+ only the substantive content is kept)
+
+ org.apache.hadoop.hbase -- Class ClusterStatus.Builder
+     java.lang.Object
+         org.apache.hadoop.hbase.ClusterStatus.Builder
+     Enclosing class: ClusterStatus
+
+ public static class ClusterStatus.Builder extends Object
+     Builder for constructing a ClusterStatus.
+
+ Field Summary
+     private Collection<ServerName>      backupMasters
+     private Boolean                     balancerOn
+     private String                      clusterId
+     private Collection<ServerName>      deadServers
+     private String                      hbaseVersion
+     private List<...>                   intransition
+     private Map<ServerName,ServerLoad>  liveServers
+     private ServerName                  master
+     private String[]                    masterCoprocessors
+
+ Constructor Summary
+     private Builder()
+
+ Method Summary (all instance, concrete methods)
+     ClusterStatus          build()
+     ClusterStatus.Builder  setBackupMasters(Collection<ServerName> backupMasters)
+     ClusterStatus.Builder  setBalancerOn(Boolean balancerOn)
+     ClusterStatus.Builder  setClusterId(String clusterId)
+     ClusterStatus.Builder  setDeadServers(Collection<ServerName> deadServers)
+     ClusterStatus.Builder  setHBaseVersion(String hbaseVersion)
+     ClusterStatus.Builder  setLiveServers(Map<ServerName,ServerLoad> liveServers)
+     (the rest of the page is truncated in this message; a hedged usage sketch follows)
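A hedged sketch of how the setters listed above chain together (the page only shows a private
constructor, so how a Builder instance is obtained is not covered by this fragment and the
instance is taken as a parameter; the values are illustrative):

    import java.util.Collections;
    import org.apache.hadoop.hbase.ClusterStatus;

    public final class ClusterStatusBuilderExample {
      // Assemble a ClusterStatus from a Builder using the setters shown in the summary.
      static ClusterStatus assemble(ClusterStatus.Builder builder, String hbaseVersion) {
        return builder
            .setHBaseVersion(hbaseVersion)
            .setLiveServers(Collections.emptyMap())
            .setDeadServers(Collections.emptyList())
            .setBackupMasters(Collections.emptyList())
            .setClusterId("example-cluster-id")   // illustrative value
            .setBalancerOn(Boolean.TRUE)
            .build();
      }
    }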

    [49/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/org/apache/hadoop/hbase/client/Admin.html
    --
    diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.html 
    b/apidocs/org/apache/hadoop/hbase/client/Admin.html
    index 9db0217..935bd33 100644
    --- a/apidocs/org/apache/hadoop/hbase/client/Admin.html
    +++ b/apidocs/org/apache/hadoop/hbase/client/Admin.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":38,"i18":38,"i19":38,"i20":38,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":50,"i33":50,"i34":50,"i35":6,"i36":6,"i37":6,"i38":50,"i39":6,"i40":38,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":38,"i52":38,"i53":6,"i54":6,"i55":18,"i56":6,"i57":6,"i58":6,"i59":38,"i60":38,"i61":6,"i62":6,"i63":18,"i64":6,"i65":6,"i66":6,"i67":38,"i68":38,"i69":6,"i70":6,"i71":6,"i72":6,"i73":38,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":18,"i92":6,"i93":38,"i94":38,"i95":38,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":18,"i116":18,"i117":18,"i
     
    118":6,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":38,"i130":6,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":38,"i137":38,"i138":38,"i139":38,"i140":38,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":38,"i150":6,"i151":6,"i152":38,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":38,"i159":6,"i160":38,"i161":6,"i162":6,"i163":6,"i164":6,"i165":18,"i166":18,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":38,"i189":38,"i190":6,"i191":6,"i192":6,"i193":6,"i194":6,"i195":6,"i196":6,"i197":6,"i198":6,"i199":6,"i200":18};
    +var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":38,"i18":38,"i19":38,"i20":38,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":50,"i33":50,"i34":50,"i35":6,"i36":6,"i37":6,"i38":50,"i39":6,"i40":38,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":38,"i52":38,"i53":6,"i54":6,"i55":18,"i56":6,"i57":6,"i58":6,"i59":38,"i60":38,"i61":6,"i62":6,"i63":18,"i64":6,"i65":6,"i66":6,"i67":38,"i68":38,"i69":6,"i70":6,"i71":6,"i72":6,"i73":38,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":18,"i93":6,"i94":38,"i95":38,"i96":38,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":18,"i117":18,"i1
     
    18":18,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":38,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":6,"i137":38,"i138":38,"i139":38,"i140":38,"i141":38,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":6,"i150":38,"i151":6,"i152":6,"i153":38,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":38,"i160":6,"i161":38,"i162":6,"i163":6,"i164":6,"i165":6,"i166":18,"i167":18,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":6,"i189":38,"i190":38,"i191":6,"i192":6,"i193":6,"i194":6,"i195":6,"i196":6,"i197":6,"i198":6,"i199":6,"i200":6,"i201":18};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
    Methods"],32:["t6","Deprecated Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -106,7 +106,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Public
    -public interface Admin
    +public interface Admin
     extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
     title="class or interface in java.io">Closeable
     The administrative API for HBase. Obtain an instance from 
    an Connection.getAdmin()
     and
      call close()
     afterwards.
    @@ -701,33 +701,39 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
     getClusterStatus() 
     
     
    +ClusterStatus
    +getClusterStatus(org.apache.hadoop.hbase.ClusterStatus.Options options)
    +Get cluster status with options to filter out unwanted information.
    +
    +
    +
     CompactionState
     getCompactionState(TableNa
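
    For reference, the new Admin#getClusterStatus(ClusterStatus.Options) overload shown in this hunk
    could be exercised roughly as in the sketch below. This is an illustrative sketch only, not code
    from the commit: it assumes an already-opened Connection, and it uses only the Options calls that
    appear in these generated docs (defaultOptions, excludeDeadServers, excludeMasterCoprocessors).

        import java.io.IOException;
        import org.apache.hadoop.hbase.ClusterStatus;
        import org.apache.hadoop.hbase.ClusterStatus.Options;
        import org.apache.hadoop.hbase.client.Admin;
        import org.apache.hadoop.hbase.client.Connection;

        public class ClusterStatusOptionsSketch {
          // "connection" is assumed to be an open Connection supplied by the caller.
          static void printStatus(Connection connection) throws IOException {
            try (Admin admin = connection.getAdmin()) {
              // Original call still works and returns the full cluster status.
              ClusterStatus full = admin.getClusterStatus();
              // New overload: skip dead-server and master-coprocessor details.
              ClusterStatus filtered = admin.getClusterStatus(
                  Options.defaultOptions()
                      .excludeDeadServers()
                      .excludeMasterCoprocessors());
              System.out.println(full);
              System.out.println(filtered);
            }
          }
        }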

    [51/51] [partial] hbase-site git commit: Published site at .

    Published site at .
    
    
    Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
    Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/8bae1c8a
    Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/8bae1c8a
    Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/8bae1c8a
    
    Branch: refs/heads/asf-site
    Commit: 8bae1c8a139a5c3edeb1b230ba2bf302144ec3ce
    Parents: c70834a
    Author: jenkins 
    Authored: Mon Aug 14 15:06:17 2017 +
    Committer: jenkins 
    Committed: Mon Aug 14 15:06:17 2017 +
    
    --
     acid-semantics.html | 4 +-
     apache_hbase_reference_guide.pdf| 6 +-
     apidocs/allclasses-frame.html   | 1 -
     apidocs/allclasses-noframe.html | 1 -
     apidocs/deprecated-list.html|41 +-
     apidocs/index-all.html  |33 +-
     .../org/apache/hadoop/hbase/ClusterStatus.html  |   122 +-
     .../apache/hadoop/hbase/HTableDescriptor.html   | 4 -
     .../hadoop/hbase/class-use/ClusterStatus.html   |10 +
     .../hbase/class-use/HColumnDescriptor.html  |15 -
     .../hbase/class-use/HTableDescriptor.html   |28 -
     .../hadoop/hbase/class-use/ServerLoad.html  | 7 +-
     .../hadoop/hbase/class-use/ServerName.html  |28 +-
     .../org/apache/hadoop/hbase/client/Admin.html   |   677 +-
     .../apache/hadoop/hbase/client/AsyncAdmin.html  |   439 +-
     ...ableMultiplexer.HTableMultiplexerStatus.html | 4 +-
     .../hbase/client/ImmutableHTableDescriptor.html |   344 -
     .../apache/hadoop/hbase/client/Increment.html   | 4 +-
     .../hadoop/hbase/client/TableDescriptor.html| 2 +-
     .../class-use/ColumnFamilyDescriptor.html   | 6 -
     .../class-use/ImmutableHTableDescriptor.html|   125 -
     .../hbase/client/class-use/TableDescriptor.html |15 -
     .../hadoop/hbase/client/package-frame.html  | 1 -
     .../hadoop/hbase/client/package-summary.html|32 +-
     .../hadoop/hbase/client/package-tree.html   | 5 -
     .../hbase/ipc/NettyRpcClientConfigHelper.html   |12 +-
     apidocs/overview-tree.html  | 6 +-
     .../org/apache/hadoop/hbase/ClusterStatus.html  |   855 +-
     .../org/apache/hadoop/hbase/client/Admin.html   |   ++--
     .../apache/hadoop/hbase/client/AsyncAdmin.html  |  2144 +-
     .../hbase/client/ImmutableHTableDescriptor.html |   130 -
     .../hbase/ipc/NettyRpcClientConfigHelper.html   | 4 +-
     .../hbase/mapreduce/TableMapReduceUtil.html | 2 +-
     book.html   | 2 +-
     bulk-loads.html | 4 +-
     checkstyle-aggregate.html   | 18690 +
     checkstyle.rss  |   158 +-
     coc.html| 4 +-
     cygwin.html | 4 +-
     dependencies.html   | 4 +-
     dependency-convergence.html |93 +-
     dependency-info.html| 4 +-
     dependency-management.html  |   148 +-
     devapidocs/allclasses-frame.html| 9 +-
     devapidocs/allclasses-noframe.html  | 9 +-
     devapidocs/constant-values.html |63 +-
     devapidocs/deprecated-list.html |64 +-
     devapidocs/index-all.html   |   706 +-
     .../hadoop/hbase/ClusterStatus.Builder.html |   536 +
     .../hadoop/hbase/ClusterStatus.Options.html |   745 +
     .../org/apache/hadoop/hbase/ClusterStatus.html  |   179 +-
     .../hadoop/hbase/CompatibilityFactory.html  | 4 +-
     .../apache/hadoop/hbase/backup/BackupAdmin.html |55 +-
     .../hadoop/hbase/backup/BackupDriver.html   |12 +-
     .../hbase/backup/BackupInfo.BackupPhase.html| 4 +-
     .../hbase/backup/BackupInfo.BackupState.html| 4 +-
     .../hadoop/hbase/backup/BackupInfo.Filter.html  | 4 +-
     .../apache/hadoop/hbase/backup/BackupInfo.html  |41 +-
     .../hadoop/hbase/backup/BackupMergeJob.html |   251 +
     .../hadoop/hbase/backup/BackupObserver.html | 4 +-
     .../hbase/backup/BackupRestoreFactory.html  |61 +-
     .../hadoop/hbase/backup/HBackupFileSystem.html  |76 +-
     .../hbase/backup/class-use/BackupInfo.html  | 4 +-
     .../hbase/backup/class-use/BackupMergeJob.html  |   192 +
     .../hbase/backup/class-use/RestoreJob.html  | 5 +-
     .../hbase/backup/impl/BackupAdminImpl.html  |   206 +-
     .../impl/BackupCommands.BackupSetCommand.html   |32 +-
     .../impl/BackupCommands.CancelCommand.html  | 8 +-
     .../backup/impl/BackupCommands.Command.html |30 +-
     .../impl/BackupCommands.CreateCommand.html  |26 +-
     .../impl/BackupCommands.DeleteCommand.html  |10 +-
     .../impl/BackupCommands.DescribeCommand.html| 8 +-
     .../backup/impl/BackupCommands.HelpCommand.html | 8 +-
     .../impl/BackupCommands.History

    hbase-site git commit: INFRA-10751 Empty commit

    Repository: hbase-site
    Updated Branches:
      refs/heads/asf-site 8bae1c8a1 -> 2341d7c5a
    
    
    INFRA-10751 Empty commit
    
    
    Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
    Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/2341d7c5
    Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/2341d7c5
    Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/2341d7c5
    
    Branch: refs/heads/asf-site
    Commit: 2341d7c5a7e1832d0a2ca80eebaa5252189c3cc8
    Parents: 8bae1c8
    Author: jenkins 
    Authored: Mon Aug 14 15:06:58 2017 +
    Committer: jenkins 
    Committed: Mon Aug 14 15:06:58 2017 +
    
    --
    
    --
    
    
    
    

    [27/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
    index 2088d7e..97cc4c6 100644
    --- a/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
    +++ b/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9};
    +var methods = 
    {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9};
     var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public final class BackupUtils
    +public final class BackupUtils
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
     A collection for methods used by multiple classes to backup 
    HBase tables.
     
    @@ -139,6 +139,10 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     LOGNAME_SEPARATOR 
     
    +
    +static int
    +MILLISEC_IN_HOUR 
    +
     
     
     
    @@ -209,11 +213,15 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     copyTableRegionInfo(Connection conn,
    BackupInfo backupInfo,
    org.apache.hadoop.conf.Configuration conf)
    -copy out Table RegionInfo into incremental backup image 
    need to consider move this
    - logic into HBackupFileSystem
    +copy out Table RegionInfo into incremental backup image 
    need to consider move this logic into
    + HBackupFileSystem
     
     
     
    +static LoadIncrementalHFiles
    +createLoader(org.apache.hadoop.conf.Configuration config) 
    +
    +
     static RestoreRequest
     createRestoreRequest(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupRootDir,
     http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupId,
    @@ -224,39 +232,58 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     Create restore request.
     
     
    -
    +
    +static boolean
    +failed(int result) 
    +
    +
    +static org.apache.hadoop.fs.Path
    +getBulkOutputDir(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String tableName,
    +
    org.apache.hadoop.conf.Configuration conf) 
    +
    +
    +static org.apache.hadoop.fs.Path
    +getBulkOutputDir(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String tableName,
    +org.apache.hadoop.conf.Configuration conf,
    +boolean deleteOnExit) 
    +
    +
     static http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
     title="class or interface in java.lang">Long
     getCreationTime(org.apache.hadoop.fs.Path p)
     Given the log file, parse the timestamp from the file 
    name.
     
     
    -
    +
    +static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    +getFileNameCompatibleString(TableName table) 
    +
    +
     static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">ListString>
     getFiles(org.apache.hadoop.fs.FileSystem fs,
     org.apache.hadoop.fs.Path rootDir,
     http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">ListString> files,
     org.apache.hadoop.fs.PathFilter filter) 
     
    -
    +
     static long
     getFilesLength(org.apache.hadoop.fs.FileSystem fs,
       org.apache.hadoop.fs.Path dir)
     Get the total length of files under the given directory 
    recursively.
     
     
    -
    +
     static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List
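
    As a side note on the BackupUtils helpers summarized above (an @InterfaceAudience.Private class,
    so not public API), the two file-oriented utilities could be used roughly as sketched here. The
    sketch is hypothetical: the old-WAL path is invented for illustration, and only the signatures
    visible in this summary, getCreationTime(Path) and getFilesLength(FileSystem, Path), are relied on.

        import org.apache.hadoop.conf.Configuration;
        import org.apache.hadoop.fs.FileSystem;
        import org.apache.hadoop.fs.Path;
        import org.apache.hadoop.hbase.backup.util.BackupUtils;

        public class BackupUtilsSketch {
          public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            // Hypothetical old-WAL file; the trailing number stands in for the timestamp in the name.
            Path wal = new Path("/hbase/oldWALs/example-host.1502700000000");
            Long created = BackupUtils.getCreationTime(wal);              // timestamp parsed from the file name
            long bytes = BackupUtils.getFilesLength(fs, wal.getParent()); // total size of the directory, recursively
            System.out.println("created=" + created + ", bytes=" + bytes);
          }
        }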

    [47/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
    --
    diff --git 
    a/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
     
    b/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
    index ea7fcc4..2868934 100644
    --- 
    a/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
    +++ 
    b/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
    @@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
     
     
     Prev Class
    -Next Class
    +Next Class
     
     
     Frames
    @@ -326,7 +326,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     Prev Class
    -Next Class
    +Next Class
     
     
     Frames
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    --
    diff --git 
    a/apidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html 
    b/apidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    deleted file mode 100644
    index 79b51b5..000
    --- a/apidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    +++ /dev/null
    @@ -1,344 +0,0 @@
    -http://www.w3.org/TR/html4/loose.dtd";>
    -
    -
    -
    -
    -
    -ImmutableHTableDescriptor (Apache HBase 3.0.0-SNAPSHOT API)
    -
    -
    -
    -
    -
    -var methods = {"i0":42,"i1":42};
    -var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
    -var altColor = "altColor";
    -var rowColor = "rowColor";
    -var tableTab = "tableTab";
    -var activeTableTab = "activeTableTab";
    -
    -
    -JavaScript is disabled on your browser.
    -
    -
    -
    -
    -
    -Skip navigation links
    -
    -
    -
    -
    -Overview
    -Package
    -Class
    -Use
    -Tree
    -Deprecated
    -Index
    -Help
    -
    -
    -
    -
    -Prev Class
    -Next Class
    -
    -
    -Frames
    -No Frames
    -
    -
    -All Classes
    -
    -
    -
    -
    -
    -
    -
    -Summary: 
    -Nested | 
    -Field | 
    -Constr | 
    -Method
    -
    -
    -Detail: 
    -Field | 
    -Constr | 
    -Method
    -
    -
    -
    -
    -
    -
    -
    -
    -org.apache.hadoop.hbase.client
    -Class 
    ImmutableHTableDescriptor
    -
    -
    -
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">java.lang.Object
    -
    -
    -org.apache.hadoop.hbase.HTableDescriptor
    -
    -
    -org.apache.hadoop.hbase.client.ImmutableHTableDescriptor
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -All Implemented Interfaces:
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
     title="class or interface in java.lang">Comparable, TableDescriptor
    -
    -
    -Deprecated.
    -
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
     title="class or interface in java.lang">@Deprecated
    - @InterfaceAudience.Public
    -public class ImmutableHTableDescriptor
    -extends HTableDescriptor
    -Read-only table descriptor.
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -Field Summary
    -
    -
    -
    -
    -Fields inherited from class org.apache.hadoop.hbase.HTableDescriptor
    -COMPACTION_ENABLED,
     DEFAULT_COMPACTION_ENABLED,
     DEFAULT_MEMSTORE_FLUSH_SIZE,
     DEFAULT_NORMALIZATION_ENABLED,
     DEFAULT_READONLY,
     DEFAULT_REGION_MEMSTORE_REPLICATION,
     DEFAULT_REGION_REPLICATION,
     delegatee<
     /a>, DURABILITY,
     FLUSH_POLICY,
     IS_META,
     IS_ROOT,
     MAX_FILESIZE,
     MEMSTORE_FLUSHSIZE,
     NAMESPACE_COL_DESC_BYTES,
     NAMESPACE_FAMILY_INFO,
     NAMESPACE_FAMILY_INFO_BYTES,
     NAMESPACE_TABLEDESC,
     NORMALIZATION_ENABLED,
     OWNER,
     OWNER_KEY,
     PRIORITY,
     READONLY,
     REGION_MEMSTORE_REPLICATION,
     REGION_REPLICATION,
     SPLIT_POLICY
    -
    -
    -
    -
    -
    -
    -
    -
    -Constructor Summary
    -
    -Constructors 
    -
    -Constructor and Description
    -
    -
    -ImmutableHTableDescriptor(HTableDescriptor desc)
    -Deprecated. 
    - 
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -Method Summary
    -
    -All Methods Instance Methods Concrete Methods Deprecated Methods 
    -
    -Modifier and Type
    -Method and Description
    -
    -
    -protected 
    org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
    -getDelegateeForModification()
    -Deprecated. 
    - 
    -
    -
    -protected HColumnDescriptor
    -toHColumnDescriptor(ColumnFamilyDescriptor desc)
    -Deprecated. 
    -Return a HColumnDescriptor for user to keep the 
    compatibility as much as possible.
    -
    -
    -
    -
    -
    -
    -
    -Methods inherited from class org.apache.hadoop.hbase.HTableDescriptor
    -addCoprocessor,
     addCoprocessor,
     addCoprocessorWithSpec,
     addFamily,
     compareTo,
     equals,
     getColumnFamilies,
     getColumnFamily, getColumnFamilyCount,
     getColumnFamilyNames,
     getConfiguration,
     getConfigurationValue,
    

    [50/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/org/apache/hadoop/hbase/ClusterStatus.html
    --
    diff --git a/apidocs/org/apache/hadoop/hbase/ClusterStatus.html 
    b/apidocs/org/apache/hadoop/hbase/ClusterStatus.html
    index 6670825..504733b 100644
    --- a/apidocs/org/apache/hadoop/hbase/ClusterStatus.html
    +++ b/apidocs/org/apache/hadoop/hbase/ClusterStatus.html
    @@ -18,8 +18,8 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":42,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10};
    -var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":42,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10};
    +var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
    Methods"],32:["t6","Deprecated Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
     var tableTab = "tableTab";
    @@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Public
    -public class ClusterStatus
    +public class ClusterStatus
     extends org.apache.hadoop.io.VersionedWritable
     Status information on the HBase cluster.
      
    @@ -136,7 +136,29 @@ extends org.apache.hadoop.io.VersionedWritable
       per server and per region.
      Regions in transition at master
      The unique cluster ID
    - 
    + 
    + ClusterStatus.Options provides a way to filter out unwanted information.
    + The following code retrieves all the cluster information.
    + 
    + 
    + // Original version still works
    + Admin admin = connection.getAdmin();
    + ClusterStatus status = admin.getClusterStatus();
    + // or the new overload below, which has the same effect
    + ClusterStatus status = admin.getClusterStatus(Options.defaultOptions());
    + 
    + 
    + If information about dead servers and master coprocessors is unwanted,
    + write the code as follows:
    + 
    + 
    + Admin admin = connection.getAdmin();
    + ClusterStatus status = admin.getClusterStatus(
    +Options.defaultOptions()
    +   .excludeDeadServers()
    +   .excludeMasterCoprocessors());
    + 
    + 
     
     
     
    @@ -163,7 +185,12 @@ extends org.apache.hadoop.io.VersionedWritable
      http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
     title="class or interface in java.util">Collection backupMasters,
      http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in 
    java.util">List rit,
      http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String[] masterCoprocessors,
    - http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
     title="class or interface in 
    java.lang">Boolean balancerOn) 
    + http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
     title="class or interface in java.lang">Boolean balancerOn)
    +Deprecated. 
    +As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    + (https://issues.apache.org/jira/browse/HBASE-15511";>HBASE-15511).
    +
    +
     
     
     
    @@ -175,7 +202,7 @@ extends org.apache.hadoop.io.VersionedWritable
     
     Method Summary
     
    -All Methods Instance Methods Concrete Methods Deprecated Methods 
    +All Methods Static Methods Instance Methods Concrete Methods Deprecated Methods 
     
     Modifier and Type
     Method and Description
    @@ -277,6 +304,10 @@ extends org.apache.hadoop.io.VersionedWritable
     isBalancerOn() 
     
     
    +static 
    org.apache.hadoop.hbase.ClusterStatus.Builder
    +newBuilder() 
    +
    +
     http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     toString() 
     
    @@ -315,15 +346,19 @@ extends org.apache.hadoop.io.VersionedWritable
     
     
     ClusterStatus
    -public ClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String hbaseVersion,
    - http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String clusterid,
    - http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
     title="class or interface in java.util">Map servers,
    - http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
     title="class or interface in java.util">Collection deadServers,
    -   

    [06/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html 
    b/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
    index dfc5842..5069501 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
    @@ -100,13 +100,13 @@ var activeTableTab = "activeTableTab";
     http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">java.lang.Object
     
     
    -io.netty.channel.ChannelHandlerAdapter
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerAdapter
     
     
    -io.netty.channel.ChannelInboundHandlerAdapter
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandlerAdapter
     
     
    -io.netty.channel.ChannelDuplexHandler
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelDuplexHandler
     
     
     org.apache.hadoop.hbase.ipc.NettyRpcDuplexHandler
    @@ -124,13 +124,13 @@ var activeTableTab = "activeTableTab";
     
     
     All Implemented Interfaces:
    -io.netty.channel.ChannelHandler, io.netty.channel.ChannelInboundHandler, 
    io.netty.channel.ChannelOutboundHandler
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler, 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandler, 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOutboundHandler
     
     
     
     @InterfaceAudience.Private
     class NettyRpcDuplexHandler
    -extends io.netty.channel.ChannelDuplexHandler
    +extends 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelDuplexHandler
     The netty rpc handler.
     
     
    @@ -145,11 +145,11 @@ extends io.netty.channel.ChannelDuplexHandler
     
     Nested Class Summary
     
    -
    +
     
     
    -Nested classes/interfaces inherited from 
    interface io.netty.channel.ChannelHandler
    -io.netty.channel.ChannelHandler.Sharable
    +Nested classes/interfaces inherited from 
    interface org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler.Sharable
     
     
     
    @@ -226,65 +226,65 @@ extends io.netty.channel.ChannelDuplexHandler
     
     
     void
    -channelInactive(io.netty.channel.ChannelHandlerContext ctx) 
    +channelInactive(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx) 
     
     
     void
    -channelRead(io.netty.channel.ChannelHandlerContext ctx,
    +channelRead(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
    http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object msg) 
     
     
     private void
    -cleanupCalls(io.netty.channel.ChannelHandlerContext ctx,
    +cleanupCalls(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
     http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in 
    java.io">IOException error) 
     
     
     void
    -exceptionCaught(io.netty.channel.ChannelHandlerContext ctx,
    +exceptionCaught(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
    http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
     title="class or interface in 
    java.lang">Throwable cause) 
     
     
     private void
    -readResponse(io.netty.channel.ChannelHandlerContext ctx,
    -io.netty.buffer.ByteBuf buf) 
    +readResponse(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
    +
    org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBuf buf) 
     
     
     void
    -userEventTriggered(io.netty.channel.ChannelHandlerContext ctx,
    +userEventTriggered(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
       http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object evt) 
     
     
     void
    -write(io.netty.channel.ChannelHandlerContext ctx,
    +write(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
      http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object msg,
    - io.netty.channel.ChannelPromise promise) 
    + 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelPromise promise) 
     
     
     private void
    -writeRequest(io.netty.channel.ChannelHandlerContext ctx,
    +writeRequest(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
     Call call,
    -io.netty.channel.ChannelPromise promise) 
    +
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelPromise promise) 
     
     
     
    -
    +
     
     
    -Methods inherited from 
    class io.netty.channel.ChannelDuplexHandler
    +Methods inherited from 
    class org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelDuplexHandler
     bind, close, connect, deregister, disconnect, flush, read
     
     
    -
    +
     
     
    -Methods inherit

    [04/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
    b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
    index 8a3486f..8e662c4 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":9,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":9,"i107":10,"i108":10,"i109"
     
    :10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":9,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":9};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":9,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":9,"i109"
     
    :10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":9,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":9};
     var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.LimitedPrivate(value="Tools")
    -public class HMaster
    +public class HMaster
     extends HRegionServer
     implements MasterServices
     HMaster is the "master server" for HBase. An HBase cluster 
    has one active
    @@ -735,227 +735,235 @@ implements 
    +private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List
    +getBackupMasters() 
    +
    +
     CatalogJanitor
     getCatalogJanitor() 
     
    -
    -(package private) http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    +
    +http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     getClientIdAuditPrefix() 
     
    -
    +
     ClusterSchema
     getClusterSchema() 
     
    -
    +
     ClusterStatus
     getClusterStatus() 
     
    -
    +
    +ClusterStatus
    +getClusterStatus(ClusterStatus.Options options) 
    +
    +
     protected http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
     title="class or interface in java.lang">Class
     getDumpServlet() 
     
    -
    +
     FavoredNodesManager
     getFavoredNodesManager() 
     
    -
    +
     protected TableDescriptors
     getFsTableDescriptors() 
     
    -
    +
     HFile

    [18/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.html
     
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.html
    index da33b58..51a7fd4 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.html
    @@ -181,7 +181,7 @@ extends Constructor and Description
     
     
    -AsyncMasterRequestRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncMasterRequestRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    AsyncMasterRequestRpcRetryingCaller.Callable callable,
    long pauseNs,
    @@ -259,13 +259,13 @@ extends 
    +
     
     
     
     
     AsyncMasterRequestRpcRetryingCaller
    -public AsyncMasterRequestRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +public AsyncMasterRequestRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    AsyncMasterRequestRpcRetryingCaller.Callable callable,
    long pauseNs,
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
    index 626c676..811c8ad 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
    @@ -144,7 +144,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     nonMetaRegionLocator 
     
     
    -private io.netty.util.HashedWheelTimer
    +private 
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
     retryTimer 
     
     
    @@ -162,8 +162,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     Constructor and Description
     
     
    -AsyncRegionLocator(AsyncConnectionImpl conn,
    -  
    io.netty.util.HashedWheelTimer retryTimer) 
    +AsyncRegionLocator(AsyncConnectionImpl conn,
    +  
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer) 
     
     
     
    @@ -260,7 +260,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     retryTimer
    -private final io.netty.util.HashedWheelTimer retryTimer
    +private 
    final org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer
     
     
     
    @@ -289,14 +289,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     Constructor Detail
    -
    +
     
     
     
     
     AsyncRegionLocator
     AsyncRegionLocator(AsyncConnectionImpl conn,
    -   io.netty.util.HashedWheelTimer retryTimer)
    +   
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer)
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
    index c134142..0999b91 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
    @@ -167,7 +167,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     pauseNs 
     
     
    -private io.netty.util.HashedWheelTimer
    +private 
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
     retryTimer 
     
     
    @@ -201,7 +201,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     Constructor and Description
     
     
    -AsyncRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
       AsyncConnectionImpl conn,
       long pauseNs,
       int maxAttempts,
    @@ -291,7 +291,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     retryTimer
    -private final io.netty.util.HashedWheelTimer retryTimer
    +private 
    final org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer
     
     
     
    @@ -401,13 +401,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     Constructor Detail
    -
    +
     
     
     
     
     AsyncRpcRetryingCaller
    -public AsyncRpcRetryingCaller(io

    [31/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
    index ac05189..51c5c49 100644
    --- a/devapidocs/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
    +++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
    @@ -144,26 +144,22 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     conn 
     
     
    -private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    -fullBackupId 
    -
    -
     private boolean
     isOverwrite 
     
    -
    +
     private static 
    org.apache.commons.logging.Log
     LOG 
     
    -
    +
     private TableName[]
     sTableArray 
     
    -
    +
     private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     targetRootDir 
     
    -
    +
     private TableName[]
     tTableArray 
     
    @@ -297,22 +293,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupId
     
     
    -
    -
    -
    -
    -
    -fullBackupId
    -private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String fullBackupId
    -
    -
     
     
     
     
     
     sTableArray
    -private TableName[] sTableArray
    +private TableName[] sTableArray
     
     
     
    @@ -321,7 +308,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     tTableArray
    -private TableName[] tTableArray
    +private TableName[] tTableArray
     
     
     
    @@ -330,7 +317,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     targetRootDir
    -private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String targetRootDir
    +private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String targetRootDir
     
     
     
    @@ -339,7 +326,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     isOverwrite
    -private boolean isOverwrite
    +private boolean isOverwrite
     
     
     
    @@ -356,7 +343,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     RestoreTablesClient
    -public RestoreTablesClient(Connection conn,
    +public RestoreTablesClient(Connection conn,
    RestoreRequest request)
     throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     
    @@ -379,7 +366,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     checkTargetTables
    -private void checkTargetTables(TableName[] tTableArray,
    +private void checkTargetTables(TableName[] tTableArray,
    boolean isOverwrite)
     throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     Validate target tables
    @@ -400,7 +387,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     restoreImages
    -private void restoreImages(BackupManifest.BackupImage[] images,
    +private void restoreImages(BackupManifest.BackupImage[] images,
    TableName sTable,
    TableName tTable,
    boolean truncateIfExists)
    @@ -424,7 +411,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     restore
    -private void restore(http://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true";
     title="class or interface in java.util">HashMap backupManifestMap,
    +private void restore(http://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true";
     title="class or interface in java.util">HashMap backupManifestMap,
      TableName[] sTableArray,
      TableName[] tTableArray,
      boolean isOverwrite)
    @@ -446,7 +433,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     getTsFromBackupId
    -static long getTsFromBackupId(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupId)
    +static long getTsFromBackupId(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupId)
     
     
     
    @@ -455,7 +442,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     withinRange
    -static boolean withinRange(long a,
    +static boolea

    [09/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
    index de626a4..df62ecd 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
    @@ -100,13 +100,13 @@ var activeTableTab = "activeTableTab";
     http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">java.lang.Object
     
     
    -io.netty.channel.ChannelHandlerAdapter
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerAdapter
     
     
    -io.netty.channel.ChannelInboundHandlerAdapter
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandlerAdapter
     
     
    -io.netty.channel.SimpleChannelInboundHandler
    +org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler
     
     
     
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutput.AckHandler
    @@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
     
     
     All Implemented Interfaces:
    -io.netty.channel.ChannelHandler, 
    io.netty.channel.ChannelInboundHandler
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler, 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandler
     
     
     Enclosing class:
    @@ -134,7 +134,7 @@ var activeTableTab = "activeTableTab";
     
     @ChannelHandler.Sharable
     private final class FanOutOneBlockAsyncDFSOutput.AckHandler
    -extends 
    io.netty.channel.SimpleChannelInboundHandler
    +extends 
    org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler
     
     
     
    @@ -148,11 +148,11 @@ extends 
    io.netty.channel.SimpleChannelInboundHandlerThrowable cause) 
     
     
     void
    -userEventTriggered(io.netty.channel.ChannelHandlerContext ctx,
    +userEventTriggered(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext ctx,
       http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object evt) 
     
     
     
    -
    +
     
     
    -Methods inherited from 
    class io.netty.channel.SimpleChannelInboundHandler
    +Methods inherited from 
    class org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler
     acceptInboundMessage, channelRead
     
     
    -
    +
     
     
    -Methods inherited from 
    class io.netty.channel.ChannelInboundHandlerAdapter
    +Methods inherited from 
    class org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandlerAdapter
     channelActive, channelReadComplete, channelRegistered, 
    channelUnregistered, channelWritabilityChanged
     
     
    -
    +
     
     
    -Methods inherited from 
    class io.netty.channel.ChannelHandlerAdapter
    +Methods inherited from 
    class org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerAdapter
     ensureNotSharable, handlerAdded, handlerRemoved, isSharable
     
     
    @@ -253,10 +253,10 @@ extends 
    io.netty.channel.SimpleChannelInboundHandlerclone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-";
     title="class 

    [36/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
    index 204b832..cb30554 100644
    --- a/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
    +++ b/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":9,"i4":9,"i5":9,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":9,"i55":10};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":9,"i4":9,"i5":9,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":9,"i55":10,"i56":10};
     var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -542,13 +542,17 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
     org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo
     toProtosBackupInfo() 
     
    +
    +http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    +toString() 
    +
     
     
     
     
     
     Methods inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--";
     title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
     title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
     title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--";
     title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notifyAll--";
     title="class or interface in java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--";
     title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.ht
     ml?is-external=true#wait--" title="class or interface in java.lang">wait, 
    http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-";
     title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-";
     title="class or interface in java.lang">wait
    +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--";
     title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
     title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
     title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--";
     title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notifyAll--";
     title="class or interface in java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--";
     title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-ex
     ternal=true#wait-long-" title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-";
     title="class or interface in java.lang">wait
     
     
     
    @@ -1210,13 +1214,26 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
     
     
     
    +
    +
    +
    +
    +
    +toString
    +public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String toString()
    +
    +Overrides:
    +http://docs.oracle.com/javase/8/doc

    [44/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
    --
    diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
    b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
    index 477e3fa..2ab3342 100644
    --- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
    +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
    @@ -35,1087 +35,1093 @@
     027import java.util.regex.Pattern;
     028
     029import 
    org.apache.hadoop.hbase.ClusterStatus;
    -030import 
    org.apache.hadoop.hbase.HRegionInfo;
    -031import 
    org.apache.hadoop.hbase.ProcedureInfo;
    -032import 
    org.apache.hadoop.hbase.RegionLoad;
    -033import 
    org.apache.hadoop.hbase.ServerName;
    -034import 
    org.apache.hadoop.hbase.NamespaceDescriptor;
    -035import 
    org.apache.hadoop.hbase.TableName;
    -036import 
    org.apache.hadoop.hbase.classification.InterfaceAudience;
    -037import 
    org.apache.hadoop.hbase.procedure2.LockInfo;
    -038import 
    org.apache.hadoop.hbase.quotas.QuotaFilter;
    -039import 
    org.apache.hadoop.hbase.quotas.QuotaSettings;
    -040import 
    org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable;
    -041import 
    org.apache.hadoop.hbase.client.replication.TableCFs;
    -042import 
    org.apache.hadoop.hbase.client.security.SecurityCapability;
    -043import 
    org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
    -044import 
    org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
    -045import 
    org.apache.hadoop.hbase.util.Pair;
    -046
    -047import com.google.protobuf.RpcChannel;
    -048
    -049/**
    -050 * The asynchronous administrative API 
    for HBase.
    -051 * 

     -052 * This feature is still under development, so marked as IA.Private. Will change to public when
     -053 * done. Use it with caution.
     -054 */
     -055@InterfaceAudience.Public
     -056public interface AsyncAdmin {
     -057
     -058 /**
     -059 * @param tableName Table to check.
     -060 * @return True if table exists already. The return value will be wrapped by a
     -061 * {@link CompletableFuture}.
     -062 */
     -063 CompletableFuture tableExists(TableName tableName);
     -064
     -065 /**
     -066 * List all the userspace tables.
     -067 * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}.
     -068 * @see #listTables(Optional, boolean)
     -069 */
     -070 default CompletableFuture> listTables() {
     -071return listTables(Optional.empty(), false);
     -072 }
     -073
     -074 /**
     -075 * List all the tables matching the given pattern.
     -076 * @param pattern The compiled regular expression to match against
     -077 * @param includeSysTables False to match only against userspace tables
     -078 * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}.
     -079 */
     -080 CompletableFuture> listTables(Optional pattern,
     -081 boolean includeSysTables);
     -082
     -083 /**
     -084 * List all of the names of userspace tables.
     -085 * @return a list of table names wrapped by a {@link CompletableFuture}.
     -086 * @see #listTableNames(Optional, boolean)
     -087 */
     -088 default CompletableFuture> listTableNames() {
     -089return listTableNames(Optional.empty(), false);
     -090 }
     -091
     -092 /**
     -093 * List all of the names of userspace tables.
     -094 * @param pattern The regular expression to match against
     -095 * @param includeSysTables False to match only against userspace tables
     -096 * @return a list of table names wrapped by a {@link CompletableFuture}.
     -097 */
     -098 CompletableFuture> listTableNames(Optional pattern,
     -099 boolean includeSysTables);
     -100
     -101 /**
     -102 * Method for getting the tableDescriptor
     -103 * @param tableName as a {@link TableName}
     -104 * @return the read-only tableDescriptor wrapped by a {@link CompletableFuture}.
     -105 */
     -106 CompletableFuture getTableDescriptor(TableName tableName);
     -107
     -108 /**
     -109 * Creates a new table.
     -110 * @param desc table descriptor for table
     -111 */
     -112 default CompletableFuture createTable(TableDescriptor desc) {
     -113return createTable(desc, Optional.empty());
     -114 }
     -115
     -116 /**
     -117 * Creates a new table with the specified number of regions. The start key specified will become
     -118 * the end key of the first region of the table, and the end key specified will become the start
     -119 * key of the last region of the table (the first region has a null start key and the last region
     -120 * has a null end key). BigInteger math will be used to divide the key range specified into enough
     -121 * segments to make the required number of total regions.
     -122 * @param desc table descriptor for table
     -123 * @param startKey beginning of key range
     -124 * @param endKey end of key range
     -125 * @param numRegions the total number of regions to create
     -126 */
     -127 Complet
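
    Purely as an illustration of the AsyncAdmin surface quoted above (not part of the commit), a
    caller might drive it along these lines. Assumptions: the AsyncAdmin instance is obtained
    elsewhere (for example from an AsyncConnection), the table name is made up, and the generic
    types are inferred from the javadoc text because the generated HTML strips them.

        import java.util.concurrent.CompletableFuture;
        import org.apache.hadoop.hbase.TableName;
        import org.apache.hadoop.hbase.client.AsyncAdmin;

        public class AsyncAdminSketch {
          static void inspect(AsyncAdmin admin) {
            TableName table = TableName.valueOf("demo_table");
            // tableExists completes asynchronously with a Boolean.
            CompletableFuture<Boolean> exists = admin.tableExists(table);
            exists.thenAccept(present ->
                System.out.println(table + (present ? " exists" : " is missing")));
            // listTableNames completes with the user-space table names.
            admin.listTableNames()
                .thenAccept(names -> names.forEach(System.out::println));
          }
        }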


    [12/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncAdminRequestRetryingCaller.Callable.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncAdminRequestRetryingCaller.Callable.html
     
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncAdminRequestRetryingCaller.Callable.html
    index 18077a4..90ee3b5 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncAdminRequestRetryingCaller.Callable.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncAdminRequestRetryingCaller.Callable.html
    @@ -134,7 +134,7 @@
     
     
     
    -AsyncAdminRequestRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncAdminRequestRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    long pauseNs,
    int maxAttempts,
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnection.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnection.html 
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnection.html
    index 90170a3..7961661 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnection.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnection.html
    @@ -200,8 +200,8 @@
     
     
     
    -RequestHandler(AsyncConnection conn,
    -  
    io.netty.channel.group.ChannelGroup channelGroup) 
    +RequestHandler(AsyncConnection conn,
    +  
    org.apache.hadoop.hbase.shaded.io.netty.channel.group.ChannelGroup channelGroup) 
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html 
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
    index f9d60a2..55b9349 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
    @@ -145,7 +145,7 @@
     
     
     
    -AsyncAdminRequestRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncAdminRequestRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    long pauseNs,
    int maxAttempts,
    @@ -156,7 +156,7 @@
    AsyncAdminRequestRetryingCaller.Callable callable) 
     
     
    -AsyncBatchRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncBatchRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    TableName tableName,
    http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List actions,
    @@ -178,7 +178,7 @@
       int startLogErrorsCnt) 
     
     
    -AsyncMasterRequestRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncMasterRequestRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    AsyncMasterRequestRpcRetryingCaller.Callable callable,
    long pauseNs,
    @@ -191,11 +191,11 @@
     AsyncNonMetaRegionLocator(AsyncConnectionImpl conn) 
     
     
    -AsyncRegionLocator(AsyncConnectionImpl conn,
    -  
    io.netty.util.HashedWheelTimer retryTimer) 
    +AsyncRegionLocator(AsyncConnectionImpl conn,
    +  
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer) 
     
     
    -AsyncRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
       AsyncConnectionImpl conn,
       long pauseNs,
       int maxAttempts,
    @@ -204,11 +204,11 @@
       int startLogErrorsCnt) 
     
     
    -AsyncRpcRetryingCallerFactory(AsyncConnectionImpl conn,
    - 
    io.netty.util.HashedWheelTimer retryTimer) 
    +AsyncRpcRetryingCallerFactory(AsyncConnectionImpl conn,
    + 
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer) 
     
     
    -AsyncScanSingleRegionRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncScanSingleRegionRpcRetryingCaller(org.apache.hadoop.hba
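
The only caller-visible effect of the signature changes in this hunk is the package rename: the Netty classes keep their names but now live under the shaded prefix. An illustrative (hypothetical) fragment, not taken from the commit:

    // before the relocation:
    // import io.netty.util.HashedWheelTimer;
    // after the relocation:
    import org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer;

    public class ShadedTimerSketch {
      // Same class, same behaviour; only the package (and hence classpath isolation) changed.
      private final HashedWheelTimer retryTimer = new HashedWheelTimer();
    }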

    [17/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
     
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
    index 1047b49..1ea496a 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
    @@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -private static class HBaseAdmin.MergeTableRegionsFuture
    +private static class HBaseAdmin.MergeTableRegionsFuture
     extends HBaseAdmin.TableFutureVoid>
     
     
    @@ -239,7 +239,7 @@ extends 
     
     MergeTableRegionsFuture
    -public MergeTableRegionsFuture(HBaseAdmin admin,
    +public MergeTableRegionsFuture(HBaseAdmin admin,
    TableName tableName,
    
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse response)
     
    @@ -250,7 +250,7 @@ extends 
     
     MergeTableRegionsFuture
    -public MergeTableRegionsFuture(HBaseAdmin admin,
    +public MergeTableRegionsFuture(HBaseAdmin admin,
    TableName tableName,
    http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
     title="class or interface in java.lang">Long procId)
     
    @@ -269,7 +269,7 @@ extends 
     
     getOperationType
    -public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOperationType()
    +public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOperationType()
     
     Specified by:
     getOperationType in
     class HBaseAdmin.TableFutureVoid>
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
     
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
    index 77b24f8..50b13e7 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
    @@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -private static class HBaseAdmin.ModifyColumnFamilyFuture
    +private static class HBaseAdmin.ModifyColumnFamilyFuture
     extends HBaseAdmin.ModifyTableFuture
     
     
    @@ -246,7 +246,7 @@ extends 
     
     ModifyColumnFamilyFuture
    -public ModifyColumnFamilyFuture(HBaseAdmin admin,
    +public ModifyColumnFamilyFuture(HBaseAdmin admin,
     TableName tableName,
     
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse response)
     
    @@ -265,7 +265,7 @@ extends 
     
     getOperationType
    -public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOperationType()
    +public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOperationType()
     
     Overrides:
     getOperationType in
     class HBaseAdmin.ModifyTableFuture
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
    index b669513..08df163 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
    @@ -131,7 +131,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -private static class HBaseAdmin.ModifyTableFuture
    +private static class HBaseAdmin.ModifyTableFuture
     extends HBaseAdmin.TableFutureVoid>
     
     
    @@ -250,7 +250,7 @@ extends 
     
     ModifyTableFuture
    -public ModifyTableFuture(HBaseAdmin admin,
    +public ModifyTableFuture(HBaseAdmin admin,
      TableName tableName,
      
    org.apache.hadoop.hbase.shaded.protobuf.
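
The *Future classes in this page all follow the same shape: an admin call returns a handle whose waitOperationResult(deadlineTs) polls the master until the procedure finishes or the deadline passes. A generic sketch of that pattern follows; OperationState and fetchState are invented names for illustration, not HBaseAdmin's code.

    import java.io.IOException;
    import java.util.concurrent.TimeoutException;
    import java.util.function.Supplier;

    public class ProcedurePollSketch {
      enum OperationState { RUNNING, DONE }

      // Blocks until the supplier reports DONE or deadlineTs (ms since epoch) is reached.
      static void waitOperationResult(Supplier<OperationState> fetchState, long deadlineTs)
          throws IOException, TimeoutException {
        while (System.currentTimeMillis() < deadlineTs) {
          if (fetchState.get() == OperationState.DONE) {
            return;
          }
          try {
            Thread.sleep(100);                 // simple fixed pause for the sketch
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IOException("interrupted while waiting", e);
          }
        }
        throw new TimeoutException("operation did not finish before the deadline");
      }
    }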

    [39/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/index-all.html
    --
    diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
    index 54d424b..bd4d91b 100644
    --- a/devapidocs/index-all.html
    +++ b/devapidocs/index-all.html
    @@ -2813,7 +2813,7 @@
     
     Find a bucket to allocate a block
     
    -allocateBuffer(ChannelHandlerContext,
     ByteBuf, boolean) - Method in class 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler
    +allocateBuffer(ChannelHandlerContext,
     ByteBuf, boolean) - Method in class 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler
      
     allocateBuffer()
     - Method in class org.apache.hadoop.hbase.io.hfile.HFileBlock
     
    @@ -3817,7 +3817,7 @@
      
     AsyncAdminRequestRetryingCaller - Class in 
    org.apache.hadoop.hbase.client
      
    -AsyncAdminRequestRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, long, int, long, long, int, ServerName, 
    AsyncAdminRequestRetryingCaller.Callable) - Constructor for 
    class org.apache.hadoop.hbase.client.AsyncAdminRequestRetryingCaller
    +AsyncAdminRequestRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, long, int, long, long, int, ServerName, 
    AsyncAdminRequestRetryingCaller.Callable) - Constructor for 
    class org.apache.hadoop.hbase.client.AsyncAdminRequestRetryingCaller
      
     AsyncAdminRequestRetryingCaller.Callable - 
    Interface in org.apache.hadoop.hbase.client
      
    @@ -3834,7 +3834,7 @@
     
     Retry caller for batch.
     
    -AsyncBatchRpcRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, TableName, List, long, int, long, 
    long, int) - Constructor for class org.apache.hadoop.hbase.client.AsyncBatchRpcRetryingCaller
    +AsyncBatchRpcRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, TableName, List, long, int, long, 
    long, int) - Constructor for class org.apache.hadoop.hbase.client.AsyncBatchRpcRetryingCaller
      
     AsyncBatchRpcRetryingCaller.RegionRequest - 
    Class in org.apache.hadoop.hbase.client
      
    @@ -3896,7 +3896,7 @@
     
     An asynchronous implementation of FSWAL.
     
    -AsyncFSWAL(FileSystem,
     Path, String, String, Configuration, List, boolean, 
    String, String, EventLoop, Class) - 
    Constructor for class org.apache.hadoop.hbase.regionserver.wal.AsyncFSWAL
    +AsyncFSWAL(FileSystem,
     Path, String, String, Configuration, List, boolean, 
    String, String, EventLoop, Class) - 
    Constructor for class org.apache.hadoop.hbase.regionserver.wal.AsyncFSWAL
      
     AsyncFSWALProvider - Class in org.apache.hadoop.hbase.wal
     
    @@ -3918,7 +3918,7 @@
     
     Retry caller for a request call to master.
     
    -AsyncMasterRequestRpcRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, AsyncMasterRequestRpcRetryingCaller.Callable, 
    long, int, long, long, int) - Constructor for class 
    org.apache.hadoop.hbase.client.AsyncMasterRequestRpcRetryingCaller
    +AsyncMasterRequestRpcRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, AsyncMasterRequestRpcRetryingCaller.Callable, 
    long, int, long, long, int) - Constructor for class 
    org.apache.hadoop.hbase.client.AsyncMasterRequestRpcRetryingCaller
      
     AsyncMasterRequestRpcRetryingCaller.Callable - Interface in org.apache.hadoop.hbase.client
      
    @@ -3983,7 +3983,7 @@
     
     AsyncWriter for protobuf-based WAL.
     
    -AsyncProtobufLogWriter(EventLoop,
     Class) - Constructor for class 
    org.apache.hadoop.hbase.regionserver.wal.AsyncProtobufLogWriter
    +AsyncProtobufLogWriter(EventLoop,
     Class) - Constructor for class 
    org.apache.hadoop.hbase.regionserver.wal.AsyncProtobufLogWriter
      
     AsyncProtobufLogWriter.OutputStreamWrapper - 
    Class in org.apache.hadoop.hbase.regionserver.wal
      
    @@ -3991,7 +3991,7 @@
     
     The asynchronous region locator.
     
    -AsyncRegionLocator(AsyncConnectionImpl,
     HashedWheelTimer) - Constructor for class 
    org.apache.hadoop.hbase.client.AsyncRegionLocator
    +AsyncRegionLocator(AsyncConnectionImpl,
     HashedWheelTimer) - Constructor for class 
    org.apache.hadoop.hbase.client.AsyncRegionLocator
      
     AsyncRegistry - Interface in org.apache.hadoop.hbase.client
     
    @@ -4034,13 +4034,13 @@
     
     AsyncRpcRetryingCaller - Class in org.apache.hadoop.hbase.client
      
    -AsyncRpcRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, long, int, long, long, int) - Constructor for 
    class org.apache.hadoop.hbase.client.AsyncRpcRetryingCaller
    +AsyncRpcRetryingCaller(HashedWheelTimer,
     AsyncConnectionImpl, long, int, long, long, int) - Constructor for 
    class org.apache.hadoop.hbase.client.AsyncRpcRetryingCaller
      
     AsyncRpcRetryingCallerFactory - Class in org.apache.hadoop.hbase.client
     
     Factory to create an AsyncRpcRetryCaller.
     
    -AsyncRpcRetryingCallerFactory(AsyncConnectionImpl,
     HashedWheelTimer) - Constructor for class 
    org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory
    +AsyncRpcRetryingCallerFactory(AsyncConnectionImpl,
     HashedWheelTimer) - Constructor for clas
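
Most of the retry-caller constructors indexed above take a pauseNs and maxAttempts pair that drives retry pacing. As a rough illustration only (the exact backoff curve and jitter used by HBase differ), an exponential-backoff helper could look like this:

    import java.util.concurrent.TimeUnit;

    public class RetryPauseSketch {
      // Returns the pause before the given attempt (1-based), doubling each time.
      static long backoffNs(long pauseNs, int attempt) {
        long multiplier = 1L << Math.min(attempt - 1, 30);   // cap the shift to avoid overflow
        return pauseNs * multiplier;
      }

      public static void main(String[] args) {
        long pauseNs = TimeUnit.MILLISECONDS.toNanos(100);
        for (int attempt = 1; attempt <= 5; attempt++) {
          System.out.printf("attempt %d -> pause %d ms%n",
              attempt, TimeUnit.NANOSECONDS.toMillis(backoffNs(pauseNs, attempt)));
        }
      }
    }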

    [02/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/master/MasterCoprocessorHost.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/MasterCoprocessorHost.html 
    b/devapidocs/org/apache/hadoop/hbase/master/MasterCoprocessorHost.html
    index 88e21e5..8eb0f11 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/MasterCoprocessorHost.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterCoprocessorHost.html
    @@ -18,8 +18,8 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
     
    109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10};
    -var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"]};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
     
    109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":9,"i144":9};
    +var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
     var tableTab = "tableTab";
    @@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public class MasterCoprocessorHost
    +public class MasterCoprocessorHost
     extends CoprocessorHost
     Provides the coprocessor framework and environment for 
    master oriented
      operations.  HMaster interacts with the 
    loaded coprocessors
    @@ -218,7 +218,7 @@ extends 
    -All Methods Instance Methods Concrete Methods 
    +All Methods Static Methods Instance Methods Concrete Methods 
     
     Modifier and Type
     Method and Description
    @@ -255,8 +255,8 @@ extends 
     void
    -postAddColumn(TableName tableName,
    - HColumnDescriptor columnFamily) 
    +postAddColumn(TableName tableName,
    + ColumnFamilyDescriptor columnFamily) 
     
     
     void
    @@ -287,18 +287,18 @@ extends 
     void
    -postCloneSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot,
    - HTableDescriptor hTableDescriptor) 
    +postCloneSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot,
    + TableDescriptor hTableDescriptor) 
     
     
     void
    -postCompletedAddColumnFamilyAction(TableName tableName,
    -  HColumnDescriptor columnFamily,
    +postCompletedAddColumnFamilyAction(TableName tableName,
    +  ColumnFamilyDescriptor columnFamily,

    [41/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/checkstyle.rss
    --
    diff --git a/checkstyle.rss b/checkstyle.rss
    index 691d6ad..a5be63d 100644
    --- a/checkstyle.rss
    +++ b/checkstyle.rss
    @@ -25,8 +25,8 @@ under the License.
     en-us
     ©2007 - 2017 The Apache Software Foundation
     
    -  File: 2024,
    - Errors: 12796,
    +  File: 2026,
    + Errors: 12844,
      Warnings: 0,
      Infos: 0
       
    @@ -349,7 +349,7 @@ under the License.
       0
     
     
    -  27
    +  30
     
       
       
    @@ -419,7 +419,7 @@ under the License.
       0
     
     
    -  1
    +  2
     
       
       
    @@ -755,7 +755,7 @@ under the License.
       0
     
     
    -  60
    +  58
     
       
       
    @@ -1203,7 +1203,7 @@ under the License.
       0
     
     
    -  0
    +  1
     
       
       
    @@ -1306,6 +1306,20 @@ under the License.
       
       
     
    +  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.backup.BackupMergeJob.java";>org/apache/hadoop/hbase/backup/BackupMergeJob.java
    +
    +
    +  0
    +
    +
    +  0
    +
    +
    +  1
    +
    +  
    +  
    +
       http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.rest.StorageClusterStatusResource.java";>org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
     
     
    @@ -1847,7 +1861,7 @@ under the License.
       0
     
     
    -  1
    +  2
     
       
       
    @@ -3485,7 +3499,7 @@ under the License.
       0
     
     
    -  2
    +  3
     
       
       
    @@ -4255,7 +4269,7 @@ under the License.
       0
     
     
    -  0
    +  1
     
       
       
    @@ -5235,7 +5249,7 @@ under the License.
       0
     
     
    -  0
    +  1
     
       
       
    @@ -5375,7 +5389,7 @@ under the License.
       0
     
     
    -  0
    +  3
     
       
       
    @@ -7181,7 +7195,7 @@ under the License.
       0
     
     
    -  0
    +  1
     
       
       
    @@ -8175,7 +8189,7 @@ under the License.
       0
     
     
    -  2
    +  4
     
       
       
    @@ -8189,7 +8203,7 @@ under the License.
       0
     
     
    -  124
    +  125
     
       
       
    @@ -8343,7 +8357,7 @@ under the License.
       0
     
     
    -  0
    +  1
     
       
       
    @@ -8735,7 +8749,7 @@ under the License.
       0
     
     
    -  2
    +  3
     
       
       
    @@ -10093,7 +10107,7 @@ under the License.
       0
     
     
    -  4
    +  5
     
       
       
    @@ -10317,7 +10331,7 @@ under the License.
       0
     
     
    -  1
    +  3
     
       
       
    @@ -11185,7 +11199,7 @@ under the License.
       0
     
     
    -  5
    +  4
     
       
       
    @@ -12753,7 +12767,7 @@ under the License.
       0
     
     
    -  0
    +  1
     
       
       
    @@ -13411,7 +13425,7 @@ under the License.
       0
     
      

    [30/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.html
    deleted file mode 100644
    index c8cc759..000
    --- a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.html
    +++ /dev/null
    @@ -1,531 +0,0 @@
    -http://www.w3.org/TR/html4/loose.dtd";>
    -
    -
    -
    -
    -
    -HFileSplitterJob (Apache HBase 3.0.0-SNAPSHOT API)
    -
    -
    -
    -
    -
    -var methods = {"i0":10,"i1":9,"i2":10,"i3":10};
    -var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
    -var altColor = "altColor";
    -var rowColor = "rowColor";
    -var tableTab = "tableTab";
    -var activeTableTab = "activeTableTab";
    -
    -
    -JavaScript is disabled on your browser.
    -
    -
    -
    -
    -
    -Skip navigation links
    -
    -
    -
    -
    -Overview
    -Package
    -Class
    -Use
    -Tree
    -Deprecated
    -Index
    -Help
    -
    -
    -
    -
    -Prev Class
    -Next Class
    -
    -
    -Frames
    -No Frames
    -
    -
    -All Classes
    -
    -
    -
    -
    -
    -
    -
    -Summary: 
    -Nested | 
    -Field | 
    -Constr | 
    -Method
    -
    -
    -Detail: 
    -Field | 
    -Constr | 
    -Method
    -
    -
    -
    -
    -
    -
    -
    -
    -org.apache.hadoop.hbase.backup.mapreduce
    -Class HFileSplitterJob
    -
    -
    -
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">java.lang.Object
    -
    -
    -org.apache.hadoop.conf.Configured
    -
    -
    -org.apache.hadoop.hbase.backup.mapreduce.HFileSplitterJob
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -All Implemented Interfaces:
    -org.apache.hadoop.conf.Configurable, org.apache.hadoop.util.Tool
    -
    -
    -
    -@InterfaceAudience.Private
    -public class HFileSplitterJob
    -extends org.apache.hadoop.conf.Configured
    -implements org.apache.hadoop.util.Tool
    -A tool to split HFiles into new region boundaries as a 
    MapReduce job. The tool generates HFiles
    - for later bulk importing.
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -Nested Class Summary
    -
    -Nested Classes 
    -
    -Modifier and Type
    -Class and Description
    -
    -
    -(package private) static class 
    -HFileSplitterJob.HFileCellMapper
    -A mapper that just writes out cells.
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -Field Summary
    -
    -Fields 
    -
    -Modifier and Type
    -Field and Description
    -
    -
    -static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    -BULK_OUTPUT_CONF_KEY 
    -
    -
    -private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    -JOB_NAME_CONF_KEY 
    -
    -
    -private static 
    org.apache.commons.logging.Log
    -LOG 
    -
    -
    -(package private) static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    -NAME 
    -
    -
    -static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    -TABLE_MAP_KEY 
    -
    -
    -static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    -TABLES_KEY 
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -Constructor Summary
    -
    -Constructors 
    -
    -Modifier
    -Constructor and Description
    -
    -
    - 
    -HFileSplitterJob() 
    -
    -
    -protected 
    -HFileSplitterJob(org.apache.hadoop.conf.Configuration c) 
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -Method Summary
    -
    -All Methods Static Methods Instance Methods Concrete Methods 
    -
    -Modifier and Type
    -Method and Description
    -
    -
    -org.apache.hadoop.mapreduce.Job
    -createSubmittableJob(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String[] args)
    -Sets up the actual job.
    -
    -
    -
    -static void
    -main(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String[] args)
    -Main entry point.
    -
    -
    -
    -int
    -run(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in 
    java.lang">String[] args) 
    -
    -
    -private void
    -usage(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String errorMsg)
    -Print usage
    -
    -
    -
    -
    -
    -
    -
    -Methods inherited from class org.apache.hadoop.conf.Configured
    -getConf, setConf
    -
    -
    -
    -
    -
    -Methods inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
    -http://docs.oracle.com/javase/8/docs/a
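
The removed HFileSplitterJob followed the standard Hadoop Tool/Configured pattern: parse arguments, build a MapReduce Job, and let ToolRunner handle configuration plumbing. A minimal skeleton of that pattern (illustrative only, not the deleted class):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    public class SplitterToolSketch extends Configured implements Tool {
      @Override
      public int run(String[] args) throws Exception {
        if (args.length < 1) {
          System.err.println("Usage: SplitterToolSketch <input>"); // mirrors usage(errorMsg)
          return -1;
        }
        // A real implementation would build and submit a MapReduce Job here
        // (cf. createSubmittableJob(String[]) in the class summary above).
        return 0;
      }

      public static void main(String[] args) throws Exception {
        int exit = ToolRunner.run(new Configuration(), new SplitterToolSketch(), args);
        System.exit(exit);
      }
    }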

    [35/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
    index 2167678..cb9af32 100644
    --- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
    +++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public class BackupAdminImpl
    +public class BackupAdminImpl
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
     implements BackupAdmin
     
    @@ -198,122 +198,135 @@ implements 
     private void
    +checkIfValidForMerge(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String[] backupIds,
    +BackupSystemTable table)
    +Verifies that backup images are valid for merge.
    +
    +
    +
    +private void
     cleanupBackupDir(BackupInfo backupInfo,
     TableName table,
     org.apache.hadoop.conf.Configuration conf)
     Clean up the data at target directory
     
     
    -
    +
     void
     close() 
     
    -
    +
     private int
     deleteBackup(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupId,
     BackupSystemTable sysTable)
     Delete single backup and all related backups 
      Algorithm:
    -  Backup type: FULL or INCREMENTAL 
    -  Is this last backup session for table T: YES or NO 
    -  For every table T from table list 'tables':
    -  if(FULL, YES) deletes only physical data (PD) 
    -  if(FULL, NO), deletes PD, scans all newer backups and removes T from 
    backupInfo,
    -  until we either reach the most recent backup for T in the system or FULL 
    backup
    -  which includes T
    -  if(INCREMENTAL, YES) deletes only physical data (PD)
    -  if(INCREMENTAL, NO) deletes physical data and for table T scans all backup 
    images between last
    -  FULL backup, which is older than the backup being deleted and the next FULL 
    backup (if exists) 
    -  or last one for a particular table T and removes T from list of backup 
    tables.
    + Backup type: FULL or INCREMENTAL 
    + Is this last backup session for table T: YES or NO 
    + For every table T from table list 'tables':
    + if(FULL, YES) deletes only physical data (PD) 
    + if(FULL, NO), deletes PD, scans all newer backups and removes T from 
    backupInfo,
    + until we either reach the most recent backup for T in the system or FULL 
    backup
    + which includes T
    + if(INCREMENTAL, YES) deletes only physical data (PD) if(INCREMENTAL, NO) 
    deletes physical data
    + and for table T scans all backup images between last
    + FULL backup, which is older than the backup being deleted and the next FULL 
    backup (if exists) 
    + or last one for a particular table T and removes T from list of backup 
    tables.
     
     
    -
    +
     int
     deleteBackups(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String[] backupIds)
     Delete backup image command
     
     
    -
    +
     boolean
     deleteBackupSet(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String name)
     Delete backup set command
     
     
    -
    +
     private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List
     excludeNonExistingTables(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List tableList,
     http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List nonExistingTableList) 
     
    -
    +
     private void
     finalizeDelete(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
     title="class or interface in java.util">MapString,http://docs.oracle.com/javase/8/docs/api/java/util/HashSet.html?is-external=true";
     title="class or interface in java.util">

    [34/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
    index 11408eb..f18d891 100644
    --- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
    +++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
    @@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
     
     
     Direct Known Subclasses:
    -BackupCommands.BackupSetCommand, BackupCommands.CancelCommand, BackupCommands.CreateCommand, BackupCommands.DeleteCommand, BackupCommands.DescribeCommand, BackupCommands.HelpCommand, BackupCommands.HistoryCommand, BackupCommands.ProgressCommand, BackupCommands.RepairCommand
    +BackupCommands.BackupSetCommand, BackupCommands.CancelCommand, BackupCommands.CreateCommand, BackupCommands.DeleteCommand, BackupCommands.DescribeCommand, BackupCommands.HelpCommand, BackupCommands.HistoryCommand, BackupCommands.MergeCommand, BackupCommands.ProgressCommand, BackupCommands.RepairCommand
     
     
     Enclosing class:
    @@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public abstract static class BackupCommands.Command
    +public abstract static class BackupCommands.Command
     extends org.apache.hadoop.conf.Configured
     
     
    @@ -201,9 +201,8 @@ extends org.apache.hadoop.conf.Configured
     
     protected boolean
     requiresConsistentState()
    -Command requires consistent state of a backup system
    - Backup system may become inconsistent because of an abnormal
    - termination of a backup session or delete command
    +Command requires consistent state of a backup system Backup 
    system may become inconsistent
    + because of an abnormal termination of a backup session or delete command
     
     
     
    @@ -247,7 +246,7 @@ extends org.apache.hadoop.conf.Configured
     
     
     cmdline
    -org.apache.commons.cli.CommandLine cmdline
    +org.apache.commons.cli.CommandLine cmdline
     
     
     
    @@ -256,7 +255,7 @@ extends org.apache.hadoop.conf.Configured
     
     
     conn
    -Connection conn
    +Connection conn
     
     
     
    @@ -273,7 +272,7 @@ extends org.apache.hadoop.conf.Configured
     
     
     Command
    -Command(org.apache.hadoop.conf.Configuration conf)
    +Command(org.apache.hadoop.conf.Configuration conf)
     
     
     
    @@ -290,7 +289,7 @@ extends org.apache.hadoop.conf.Configured
     
     
     execute
    -public void execute()
    +public void execute()
      throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     
     Throws:
    @@ -304,7 +303,7 @@ extends org.apache.hadoop.conf.Configured
     
     
     finish
    -public void finish()
    +public void finish()
     throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     
     Throws:
    @@ -318,7 +317,7 @@ extends org.apache.hadoop.conf.Configured
     
     
     printUsage
    -protected abstract void printUsage()
    +protected abstract void printUsage()
     
     
     
    @@ -327,7 +326,7 @@ extends org.apache.hadoop.conf.Configured
     
     
     requiresNoActiveSession
    -protected boolean requiresNoActiveSession()
    +protected boolean requiresNoActiveSession()
     The command can't be run if active backup session is in 
    progress
     
     Returns:
    @@ -341,10 +340,9 @@ extends org.apache.hadoop.conf.Configured
     
     
     requiresConsistentState
    -protected boolean requiresConsistentState()
    -Command requires consistent state of a backup system
    - Backup system may become inconsistent because of an abnormal
    - termination of a backup session or delete command
    +protected boolean requiresConsistentState()
    +Command requires consistent state of a backup system Backup 
    system may become inconsistent
    + because of an abnormal termination of a backup session or delete command
     
     Returns:
     true, if yes
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
     
    b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
    index 036dad8..df01402 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
    @@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public static class BackupCommands.CreateCommand
    +public static class BackupCommands.CreateCommand
     extends BackupCommands.Command
     
     
    @@ -196,9 +196,8 @@ extends 
     protected boolean
     requiresConsistentState()
    -Command requires consistent state of a backup system
    - Backup system may become inconsistent because of an abnormal
    - termination of a backup session or delete command
    +Command requi
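
A hedged sketch of the Command contract documented above: concrete commands supply printUsage() and override the two guard methods when they need an exclusive session or a consistent backup system. The MergeLikeCommand below is illustrative only, not the real MergeCommand.

    public abstract class CommandSketch {
      public void execute() throws java.io.IOException {
        // common pre-checks would run here, driven by the two guards below
      }
      public void finish() throws java.io.IOException {}
      protected abstract void printUsage();
      protected boolean requiresNoActiveSession() { return false; }
      protected boolean requiresConsistentState() { return false; }

      // Example subclass: a merge-style command that needs both guarantees.
      static class MergeLikeCommand extends CommandSketch {
        @Override protected void printUsage() { System.out.println("usage: backup merge <ids>"); }
        @Override protected boolean requiresNoActiveSession() { return true; }
        @Override protected boolean requiresConsistentState() { return true; }
      }
    }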

    [20/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.html
     
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.html
    index 00a35cd..865044f 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.html
    @@ -184,7 +184,7 @@ extends Constructor and Description
     
     
    -AsyncAdminRequestRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncAdminRequestRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    long pauseNs,
    int maxAttempts,
    @@ -272,13 +272,13 @@ extends 
    +
     
     
     
     
     AsyncAdminRequestRetryingCaller
    -public AsyncAdminRequestRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +public AsyncAdminRequestRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    long pauseNs,
    int maxAttempts,
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
    index 1b19b34..1e16ca8 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
    @@ -200,7 +200,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     pauseNs 
     
     
    -private io.netty.util.HashedWheelTimer
    +private 
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
     retryTimer 
     
     
    @@ -234,7 +234,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     Constructor and Description
     
     
    -AsyncBatchRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +AsyncBatchRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    TableName tableName,
    http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List actions,
    @@ -394,7 +394,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     retryTimer
    -private final io.netty.util.HashedWheelTimer retryTimer
    +private 
    final org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer
     
     
     
    @@ -513,13 +513,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     Constructor Detail
    -
    +
     
     
     
     
     AsyncBatchRpcRetryingCaller
    -public AsyncBatchRpcRetryingCaller(io.netty.util.HashedWheelTimer retryTimer,
    +public AsyncBatchRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
    AsyncConnectionImpl conn,
    TableName tableName,
    http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List actions,
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
    index 6b1b443..b8f88a2 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
    @@ -185,7 +185,7 @@ implements RESOLVE_HOSTNAME_ON_FAIL_KEY 
     
     
    -(package private) static 
    io.netty.util.HashedWheelTimer
    +(package private) static 
    org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
     RETRY_TIMER 
     
     
    @@ -368,7 +368,7 @@ implements 
     
     RETRY_TIMER
    -static final io.netty.util.HashedWheelTimer RETRY_TIMER
    +static 
    final org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer RETRY_TIMER
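
The RETRY_TIMER field above is a single shared wheel timer used to schedule delayed retries without tying up a thread per pending call. A minimal sketch of that usage against the plain (shaded) Netty timer API, not HBase's internal caller classes:

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer;
    import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout;

    public class RetryTimerSketch {
      static final HashedWheelTimer RETRY_TIMER = new HashedWheelTimer();

      // Schedules 'retry' to run after the given pause instead of blocking a thread.
      static Timeout scheduleRetry(Runnable retry, long pauseMs) {
        return RETRY_TIMER.newTimeout(timeout -> retry.run(), pauseMs, TimeUnit.MILLISECONDS);
      }

      public static void main(String[] args) throws InterruptedException {
        scheduleRetry(() -> System.out.println("retrying call..."), 200);
        Thread.sleep(500);        // give the timer a chance to fire in this demo
        RETRY_TIMER.stop();
      }
    }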
     
     
     
    
    
    

    [16/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
    index b498ecf..764074d 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
    @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -private static class HBaseAdmin.ThrowableAbortable
    +private static class HBaseAdmin.ThrowableAbortable
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
     implements Abortable
     Simple Abortable, throwing 
    RuntimeException on abort.
    @@ -199,7 +199,7 @@ implements 
     
     ThrowableAbortable
    -private ThrowableAbortable()
    +private ThrowableAbortable()
     
     
     
    @@ -216,7 +216,7 @@ implements 
     
     abort
    -public void abort(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String why,
    +public void abort(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String why,
       http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
     title="class or interface in java.lang">Throwable e)
     Description copied from 
    interface: Abortable
     Abort the server or client.
    @@ -235,7 +235,7 @@ implements 
     
     isAborted
    -public boolean isAborted()
    +public boolean isAborted()
     Description copied from 
    interface: Abortable
     Check if the server or client was aborted.
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
    index fb48ad4..ed982af 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
    @@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -private static class HBaseAdmin.TruncateTableFuture
    +private static class HBaseAdmin.TruncateTableFuture
     extends HBaseAdmin.TableFutureVoid>
     
     
    @@ -260,7 +260,7 @@ extends 
     
     preserveSplits
    -private final boolean preserveSplits
    +private final boolean preserveSplits
     
     
     
    @@ -277,7 +277,7 @@ extends 
     
     TruncateTableFuture
    -public TruncateTableFuture(HBaseAdmin admin,
    +public TruncateTableFuture(HBaseAdmin admin,
    TableName tableName,
    boolean preserveSplits,
    
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse response)
    @@ -297,7 +297,7 @@ extends 
     
     getOperationType
    -public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOperationType()
    +public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOperationType()
     
     Specified by:
     getOperationType in
     class HBaseAdmin.TableFutureVoid>
    @@ -312,7 +312,7 @@ extends 
     
     waitOperationResult
    -protected http://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true";
     title="class or interface in java.lang">Void waitOperationResult(long deadlineTs)
    +protected http://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true";
     title="class or interface in java.lang">Void waitOperationResult(long deadlineTs)
     throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException,
    http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeoutException.html?is-external=true";
     title="class or interface in java.util.concurrent">TimeoutException
     Description copied from 
    class: HBaseAdmin.ProcedureFuture
    
    
    

    [48/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    --
    diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
    b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    index e5db1b0..efbf566 100644
    --- a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    +++ b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":38,"i10":18,"i11":6,"i12":18,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":18,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":18,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":18,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":6,"i49":6,"i50":18,"i51":6,"i52":18,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":18,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":18,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":18,"i82":6,"i83":6,"i84":18,"i85":6,"i86":18,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":18,"i113":18,"i114":6,"i115":6,"i116":18,"i117":6,"i118":6,"
     i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6};
    +var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":38,"i10":18,"i11":6,"i12":18,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":18,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":18,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":18,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":18,"i48":6,"i49":6,"i50":6,"i51":18,"i52":6,"i53":18,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":18,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":18,"i77":6,"i78":18,"i79":6,"i80":18,"i81":6,"i82":18,"i83":6,"i84":6,"i85":18,"i86":6,"i87":18,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":18,"i114":18,"i115":6,"i116":6,"i117":18,"i118":6,"
     i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
    Methods"],32:["t6","Deprecated Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -102,7 +102,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Public
    -public interface AsyncAdmin
    +public interface AsyncAdmin
     The asynchronous administrative API for HBase.
      
      This feature is still under development, so marked as IA.Private. Will change 
    to public when
    @@ -398,127 +398,131 @@ public interface getClusterStatus() 
     
     
    +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
    +getClusterStatus(org.apache.hadoop.hbase.ClusterStatus.Options options) 
    +
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
     getCompactionState(TableName tableName)
     Get the current compaction state of a table.
     
     
    -
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
     getCompactionStateForRegion(byte[] regionName)
     Get the current compaction state of region.
     
     
    -
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFutureOptionalLong>>
     getLastMajorCompactionTimestamp(TableName tableName)
     Get the timestamp of the last major compaction for the 
    passed table.
     
     
    -
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFutureOptional
    

    [08/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
    index 4066de2..b54297e 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
    @@ -100,13 +100,13 @@ var activeTableTab = "activeTableTab";
     http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">java.lang.Object
     
     
    -io.netty.channel.ChannelHandlerAdapter
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerAdapter
     
     
    -io.netty.channel.ChannelOutboundHandlerAdapter
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOutboundHandlerAdapter
     
     
    -io.netty.handler.codec.MessageToByteEncoder
    +org.apache.hadoop.hbase.shaded.io.netty.handler.codec.MessageToByteEncoder
     
     
     
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler
    @@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
     
     
     All Implemented Interfaces:
    -io.netty.channel.ChannelHandler, 
    io.netty.channel.ChannelOutboundHandler
    +org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler, 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOutboundHandler
     
     
     Enclosing class:
    @@ -133,7 +133,7 @@ var activeTableTab = "activeTableTab";
     
     
     private static final class FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler
    -extends 
    io.netty.handler.codec.MessageToByteEncoder
    +extends 
    org.apache.hadoop.hbase.shaded.io.netty.handler.codec.MessageToByteEncoder
     
     
     
    @@ -147,11 +147,11 @@ extends 
    io.netty.handler.codec.MessageToByteEncoderclone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-";
     title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
     title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
     title="class or interface in java.lang">getClass, http://docs.oracle.com

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/package-use.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-use.html 
    b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
    index ff8714f..ad92e41 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/package-use.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
    @@ -99,24 +99,28 @@
      
     
     
    -org.apache.hadoop.hbase.backup.master
    +org.apache.hadoop.hbase.backup.mapreduce
      
     
     
    -org.apache.hadoop.hbase.backup.util
    +org.apache.hadoop.hbase.backup.master
      
     
     
    +org.apache.hadoop.hbase.backup.util
    + 
    +
    +
     org.apache.hadoop.hbase.client
     
     Provides HBase Client
     
     
    -
    +
     org.apache.hadoop.hbase.client.backoff
      
     
    -
    +
     org.apache.hadoop.hbase.client.coprocessor
     
     Provides client classes for invoking Coprocessor RPC 
    protocols
    @@ -127,208 +131,208 @@
     
     
     
    -
    +
     org.apache.hadoop.hbase.client.example
      
     
    -
    +
     org.apache.hadoop.hbase.client.locking
      
     
    -
    +
     org.apache.hadoop.hbase.client.replication
      
     
    -
    +
     org.apache.hadoop.hbase.constraint
     
     Restrict the domain of a data attribute, often times to 
    fulfill business rules/requirements.
     
     
    -
    +
     org.apache.hadoop.hbase.coprocessor
     
     Table of Contents
     
     
    -
    +
     org.apache.hadoop.hbase.coprocessor.example
      
     
    -
    +
     org.apache.hadoop.hbase.exceptions
      
     
    -
    +
     org.apache.hadoop.hbase.favored
      
     
    -
    +
     org.apache.hadoop.hbase.io
      
     
    -
    +
     org.apache.hadoop.hbase.io.hfile
     
     Provides implementations of HFile and HFile
      BlockCache.
     
     
    -
    +
     org.apache.hadoop.hbase.ipc
     
     Tools to help define network clients and servers.
     
     
    -
    +
     org.apache.hadoop.hbase.mapred
     
     Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce";>MapReduce
     Input/OutputFormats, a table indexing MapReduce job, and utility methods.
     
     
    -
    +
     org.apache.hadoop.hbase.mapreduce
     
     Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce";>MapReduce
     Input/OutputFormats, a table indexing MapReduce job, and utility methods.
     
     
    -
    +
     org.apache.hadoop.hbase.mapreduce.replication
      
     
    -
    +
     org.apache.hadoop.hbase.master
      
     
    -
    +
     org.apache.hadoop.hbase.master.assignment
      
     
    -
    +
     org.apache.hadoop.hbase.master.cleaner
      
     
    -
    +
     org.apache.hadoop.hbase.master.normalizer
      
     
    -
    +
     org.apache.hadoop.hbase.mob
      
     
    -
    +
     org.apache.hadoop.hbase.mob.compactions
      
     
    -
    +
     org.apache.hadoop.hbase.quotas
      
     
    -
    +
     org.apache.hadoop.hbase.quotas.policies
      
     
    -
    +
     org.apache.hadoop.hbase.regionserver
      
     
    -
    +
     org.apache.hadoop.hbase.regionserver.handler
      
     
    -
    +
     org.apache.hadoop.hbase.regionserver.querymatcher
      
     
    -
    +
     org.apache.hadoop.hbase.regionserver.wal
      
     
    -
    +
     org.apache.hadoop.hbase.replication
     
     Multi Cluster Replication
     
     
    -
    +
     org.apache.hadoop.hbase.replication.regionserver
      
     
    -
    +
     org.apache.hadoop.hbase.rest
     
     HBase REST
     
     
    -
    +
     org.apache.hadoop.hbase.rest.client
      
     
    -
    +
     org.apache.hadoop.hbase.rest.model
      
     
    -
    +
     org.apache.hadoop.hbase.rsgroup
      
     
    -
    +
     org.apache.hadoop.hbase.security
      
     
    -
    +
     org.apache.hadoop.hbase.security.access
      
     
    -
    +
     org.apache.hadoop.hbase.security.token
      
     
    -
    +
     org.apache.hadoop.hbase.security.visibility
      
     
    -
    +
     org.apache.hadoop.hbase.snapshot
      
     
    -
    +
     org.apache.hadoop.hbase.thrift
     
     Provides an HBase http://incubator.apache.org/thrift/";>Thrift
     service.
     
     
    -
    +
     org.apache.hadoop.hbase.thrift2
     
     Provides an HBase http://thrift.apache.org/";>Thrift
     service.
     
     
    -
    +
     org.apache.hadoop.hbase.tool
      
     
    -
    +
     org.apache.hadoop.hbase.util
      
     
    -
    +
     org.apache.hadoop.hbase.util.hbck
      
     
    -
    +
     org.apache.hadoop.hbase.wal
      
     
    -
    +
     org.apache.hadoop.hbase.zookeeper
      
     
    -
    +
     org.apache.hbase.archetypes.exemplars.client
     
     This package provides fully-functional exemplar Java code 
    demonstrating
    @@ -548,6 +552,24 @@ service.
     
     
     
    +
    +
    +
    +
    +Classes in org.apache.hadoop.hbase.client
     used by org.apache.hadoop.hbase.backup.mapreduce 
    +
    +Class and Description
    +
    +
    +
    +Connection
    +A cluster connection encapsulating lower level individual 
    connections to actual servers and
    + a connection to zookeeper.
    +
    +
    +
    +
    +
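     Since the backup/mapreduce classes above are documented as using a client
     Connection, the following is a minimal, hedged sketch of the standard way one
     is usually obtained (it assumes an hbase-site.xml on the classpath; it is not
     part of the diff above):

       import org.apache.hadoop.conf.Configuration;
       import org.apache.hadoop.hbase.HBaseConfiguration;
       import org.apache.hadoop.hbase.client.Admin;
       import org.apache.hadoop.hbase.client.Connection;
       import org.apache.hadoop.hbase.client.ConnectionFactory;

       public class ConnectionSketch {
         public static void main(String[] args) throws Exception {
           Configuration conf = HBaseConfiguration.create();
           // Connections are heavyweight; create one, share it, and close it when done.
           try (Connection connection = ConnectionFactory.createConnection(conf);
                Admin admin = connection.getAdmin()) {
             System.out.println(admin.getClusterStatus());
           }
         }
       }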
     
     
     
    @@ -2020,30 +2042,46 @@ service.
     
     
     
    +ColumnFamilyDescriptor
+A ColumnFamilyDescriptor contains information about a 
    column family such as the
    + number of versions, compression settings, etc.
    +
    +
    +
     Connection
     A cluster connection encapsulating lower level individual 
    connections to actual servers and
      a connection to zookeeper.
     
     
    +
    +ImmutableHColumnDescriptor
    +Deprecated. 
    +
    +
     
    +ImmutableHTableDescriptor
    +Deprecated. 
    +
    +
    +
     MasterSwitchType
     Represents the master switch type
     
     
    -
    +
     Mutation 
     
    -
    +
     Result
     Single row result of a Get or Scan query.
     
     
    -
    +
     Table
     Used to communicate with a single HBase table.
     
     
    -
    +
     TableDescriptor
     TableDescriptor contains the details about an HBase table 
    such as the descriptors of
      all the column families, is the table a catalog table,  hbase:meta 
    ,
    @@ -2051,12 +20

    [37/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html 
    b/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
    index 4748516..6c58afa 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
    @@ -18,8 +18,8 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":42,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10};
    -var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":42,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":9,"i24":10};
    +var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
    Methods"],32:["t6","Deprecated Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
     var tableTab = "tableTab";
    @@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
     
     
     Prev Class
    -Next Class
    +Next Class
     
     
     Frames
    @@ -74,7 +74,7 @@ var activeTableTab = "activeTableTab";
     
     
     Summary: 
    -Nested | 
    +Nested | 
     Field | 
     Constr | 
     Method
    @@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Public
    -public class ClusterStatus
    +public class ClusterStatus
     extends org.apache.hadoop.io.VersionedWritable
     Status information on the HBase cluster.
      
    @@ -136,13 +136,62 @@ extends org.apache.hadoop.io.VersionedWritable
       per server and per region.
      Regions in transition at master
      The unique cluster ID
    - 
    + 
    + ClusterStatus.Options provides a 
    way to filter out infos which unwanted.
    + The following codes will retrieve all the cluster information.
    + 
    + 
    + // Original version still works
    + Admin admin = connection.getAdmin();
    + ClusterStatus status = admin.getClusterStatus();
    + // or below, a new version which has the same effects
    + ClusterStatus status = admin.getClusterStatus(Options.defaultOptions());
    + 
    + 
    + If information about dead servers and master coprocessors are unwanted,
    + then codes in the following way:
    + 
    + 
    + Admin admin = connection.getAdmin();
    + ClusterStatus status = admin.getClusterStatus(
    +Options.defaultOptions()
    +   .excludeDeadServers()
    +   .excludeMasterCoprocessors());
    + 
    + 
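     For reference, a minimal sketch combining the other filters documented in
     this snapshot (getDefaultOptions(), excludeRegionState(), excludeBackupMasters(),
     reset(), as listed on the ClusterStatus.Options class-use page later in this
     archive). The factory-method spelling differs between the javadoc example above
     (defaultOptions()) and that listing (getDefaultOptions()), so treat the exact
     name here as an assumption, not the definitive API:

       import org.apache.hadoop.hbase.ClusterStatus;
       import org.apache.hadoop.hbase.client.Admin;
       import org.apache.hadoop.hbase.client.Connection;

       public class ClusterStatusOptionsSketch {
         static void printFilteredStatus(Connection connection) throws Exception {
           try (Admin admin = connection.getAdmin()) {
             // Start from the defaults (everything included) and drop what is not needed.
             ClusterStatus.Options options = ClusterStatus.Options.getDefaultOptions()
                 .excludeRegionState()
                 .excludeBackupMasters();
             ClusterStatus status = admin.getClusterStatus(options);
             System.out.println(status);

             // reset() restores the defaults so the same Options instance can be reused.
             options.reset().excludeDeadServers();
             System.out.println(admin.getClusterStatus(options));
           }
         }
       }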
     
     
     
     
     
     
    +
    +
    +
    +
    +
    +Nested Class Summary
    +
    +Nested Classes 
    +
    +Modifier and Type
    +Class and Description
    +
    +
    +static class 
    +ClusterStatus.Builder
+Builder for constructing a ClusterStatus.
    +
    +
    +
    +static class 
    +ClusterStatus.Options
    +Options provides a way to filter out unwanted 
    information.
    +
    +
    +
    +
    +
     
     
     
    @@ -220,7 +269,12 @@ extends org.apache.hadoop.io.VersionedWritable
      http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
     title="class or interface in java.util">Collection backupMasters,
      http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List rit,
      http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String[] masterCoprocessors,
    - http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
     title="class or interface in 
    java.lang">Boolean balancerOn) 
    + http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
     title="class or interface in java.lang">Boolean balancerOn)
    +Deprecated. 
    +As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    + (https://issues.apache.org/jira/browse/HBASE-15511";>HBASE-15511).
    +
    +
     
     
     
    @@ -232,7 +286,7 @@ extends org.apache.hadoop.io.VersionedWritable
     
     Method Summary
     
    -All Methods Instance Methods Concrete Methods Deprecated Methods 
    +All Methods Static Methods Instance Methods Concrete Methods Deprecated Methods 
     
     Modifier and Type
     Method and Description
    @@ -338,6 +392,10 @@ extends org.apache.hadoop.io.VersionedWritable
     isBalancerOn() 
     
     
    +static ClusterStatus.Builder
    +newBuilder() 
    +
    +
     http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     toString() 
     
    @@ -376,7 +434,7 @@ extends org.apache.hadoop.io.VersionedWritable
     
     
     VERSION
    -private static final byte VERSION
    +private static final byte VERSION
     Version for object serialization.  Incremented for changes 
    in serialized
      representation.
      
    @@ -397,7 +455,7 @@ extends org.apache.ha

    [24/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
    b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
    index 35769e1..592bd87 100644
    --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
    +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
    @@ -971,25 +971,17 @@ service.
     BackupInfo.getBackupTableInfo(TableName table) 
     
     
    -static BackupManifest
    -HBackupFileSystem.getManifest(TableName tableName,
    -   org.apache.hadoop.conf.Configuration conf,
    -   org.apache.hadoop.fs.Path backupRootPath,
    -   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in 
    java.lang">String backupId) 
    -
    -
    -private static org.apache.hadoop.fs.Path
    -HBackupFileSystem.getManifestPath(TableName tableName,
    -   org.apache.hadoop.conf.Configuration conf,
    -   org.apache.hadoop.fs.Path backupRootPath,
    -   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in 
    java.lang">String backupId) 
    -
    -
     http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     BackupInfo.getSnapshotName(TableName table) 
     
     
     static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
    +HBackupFileSystem.getTableBackupDataDir(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupRootDir,
    + http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupId,
    + TableName tableName) 
    +
    +
    +static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     HBackupFileSystem.getTableBackupDir(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupRootDir,
      http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String backupId,
      TableName tableName)
    @@ -997,11 +989,11 @@ service.
      which is also where the backup manifest file is.
     
     
    -
    +
     http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     BackupInfo.getTableBackupDir(TableName tableName) 
     
    -
    +
     static org.apache.hadoop.fs.Path
     HBackupFileSystem.getTableBackupPath(TableName tableName,
       org.apache.hadoop.fs.Path backupRootPath,
    @@ -1010,14 +1002,14 @@ service.
      which is also where the backup manifest file is.
     
     
    -
    +
     void
     BackupAdmin.removeFromBackupSet(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String name,
    TableName[] tables)
     Remove tables from backup set
     
     
    -
    +
     void
     RestoreJob.run(org.apache.hadoop.fs.Path[] dirPaths,
    TableName[] fromTables,
    @@ -1026,7 +1018,7 @@ service.
     Run restore operation
     
     
    -
    +
     void
     RestoreJob.run(org.apache.hadoop.fs.Path[] dirPaths,
    TableName[] fromTables,
    @@ -1035,24 +1027,24 @@ service.
     Run restore operation
     
     
    -
    +
     private RestoreRequest
     RestoreRequest.setFromTables(TableName[] fromTables) 
     
    -
    +
     void
     BackupInfo.setSnapshotName(TableName table,
    http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in 
    java.lang">String snapshotName) 
     
    -
    +
     private RestoreRequest
     RestoreRequest.setToTables(TableName[] toTables) 
     
    -
    +
     RestoreRequest.Builder
     RestoreRequest.Builder.withFromTables(TableName[] fromTables) 
     
    -
    +
     RestoreRequest.Builder
     RestoreRequest.Builder.withToTables(TableName[] toTables) 
     
    @@ -1515,60 +1507,64 @@ service.
     
     
     
    +private Put
    +BackupSystemTable.createPutForUpdateTablesForMerge(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List tables) 
    +
    +
     private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List
     BackupAdminImpl.excludeNonExistingTables(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List tableList,
     http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
     title="class or interface in java.util">List nonExistingTableList) 
     
    -
    +
     private http://docs.oracle.com/javase/8/doc

    [29/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.HFileCellMapper.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.HFileCellMapper.html
     
    b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.HFileCellMapper.html
    new file mode 100644
    index 000..cc61ea4
    --- /dev/null
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.HFileCellMapper.html
    @@ -0,0 +1,338 @@
    +http://www.w3.org/TR/html4/loose.dtd";>
    +
    +
    +
    +
    +
    +MapReduceHFileSplitterJob.HFileCellMapper (Apache HBase 3.0.0-SNAPSHOT 
    API)
    +
    +
    +
    +
    +
    +var methods = {"i0":10,"i1":10};
    +var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"]};
    +var altColor = "altColor";
    +var rowColor = "rowColor";
    +var tableTab = "tableTab";
    +var activeTableTab = "activeTableTab";
    +
    +
    +JavaScript is disabled on your browser.
    +
    +
    +
    +
    +
    +Skip navigation links
    +
    +
    +
    +
    +Overview
    +Package
    +Class
    +Use
    +Tree
    +Deprecated
    +Index
    +Help
    +
    +
    +
    +
    +Prev Class
    +Next Class
    +
    +
    +Frames
    +No Frames
    +
    +
    +All Classes
    +
    +
    +
    +
    +
    +
    +
    +Summary: 
    +Nested | 
    +Field | 
    +Constr | 
    +Method
    +
    +
    +Detail: 
    +Field | 
    +Constr | 
    +Method
    +
    +
    +
    +
    +
    +
    +
    +
    +org.apache.hadoop.hbase.backup.mapreduce
    +Class MapReduceHFileSplitterJob.HFileCellMapper
    +
    +
    +
    +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">java.lang.Object
    +
    +
    +org.apache.hadoop.mapreduce.Mapper
    +
    +
    +org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob.HFileCellMapper
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Enclosing class:
    +MapReduceHFileSplitterJob
    +
    +
    +
    +static class MapReduceHFileSplitterJob.HFileCellMapper
    +extends 
    org.apache.hadoop.mapreduce.Mapper
    +A mapper that just writes out cells. This one can be used 
    together with
    + KeyValueSortReducer
    +
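     A minimal illustrative sketch of a pass-through cell mapper in the spirit of
     the description above. This is not the actual HBase implementation; the output
     key type (row-keyed ImmutableBytesWritable, so a sorting reducer such as
     KeyValueSortReducer can order the cells) is an assumption:

       import java.io.IOException;
       import org.apache.hadoop.hbase.CellUtil;
       import org.apache.hadoop.hbase.KeyValue;
       import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
       import org.apache.hadoop.io.NullWritable;
       import org.apache.hadoop.mapreduce.Mapper;

       public class CellPassThroughMapper
           extends Mapper<NullWritable, KeyValue, ImmutableBytesWritable, KeyValue> {

         @Override
         protected void map(NullWritable key, KeyValue value, Context context)
             throws IOException, InterruptedException {
           // Key each cell by its row so the shuffle groups and sorts cells per row.
           context.write(new ImmutableBytesWritable(CellUtil.cloneRow(value)), value);
         }
       }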
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Nested Class Summary
    +
    +
    +
    +
    +Nested classes/interfaces inherited from 
    class org.apache.hadoop.mapreduce.Mapper
    +org.apache.hadoop.mapreduce.Mapper.Context
    +
    +
    +
    +
    +
    +
    +
    +
    +Constructor Summary
    +
    +Constructors 
    +
    +Constructor and Description
    +
    +
    +HFileCellMapper() 
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Method Summary
    +
    +All Methods Instance Methods Concrete Methods 
    +
    +Modifier and Type
    +Method and Description
    +
    +
    +void
    +map(org.apache.hadoop.io.NullWritable key,
    +   KeyValue value,
    +   org.apache.hadoop.mapreduce.Mapper.Context context) 
    +
    +
    +void
    +setup(org.apache.hadoop.mapreduce.Mapper.Context context) 
    +
    +
    +
    +
    +
    +
    +Methods inherited from class org.apache.hadoop.mapreduce.Mapper
    +cleanup, run
    +
    +
    +
    +
    +
    +Methods inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
    +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--";
     title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-";
     title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
     title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
     title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--";
     title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--";
     title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
     /Object.html?is-external=true#notifyAll--" title="class or interface in 
    java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--";
     title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--";
     title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-";
     title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html

    [14/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html 
    b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
    index 1ebfe64..93b86d7 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
    @@ -164,7 +164,7 @@ extends Constructor and Description
     
     
    -ImmutableHColumnDescriptor(ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor desc)
    +ImmutableHColumnDescriptor(ColumnFamilyDescriptor desc)
     Deprecated. 
      
     
    @@ -233,13 +233,13 @@ extends Deprecated. 
     
     
    -
    +
     
     
     
     
     ImmutableHColumnDescriptor
    -ImmutableHColumnDescriptor(ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor desc)
    +public ImmutableHColumnDescriptor(ColumnFamilyDescriptor desc)
     Deprecated. 
     
     
    @@ -257,7 +257,7 @@ extends 
     
     getDelegateeForModification
    -protected ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor getDelegateeForModification()
    +protected ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor getDelegateeForModification()
     Deprecated. 
     
     Overrides:
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html 
    b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    index 787145a..35e385f 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    @@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
     Deprecated.
     
     http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
     title="class or interface in java.lang">@Deprecated
    - @InterfaceAudience.Public
    + @InterfaceAudience.Private
     public class ImmutableHTableDescriptor
     extends HTableDescriptor
     Read-only table descriptor.
    @@ -168,6 +168,11 @@ extends Deprecated. 
      
     
    +
    +ImmutableHTableDescriptor(TableDescriptor desc)
    +Deprecated. 
    + 
    +
     
     
     
    @@ -228,10 +233,20 @@ extends 
     
     
    +
    +
    +ImmutableHTableDescriptor
    +public ImmutableHTableDescriptor(HTableDescriptor desc)
    +Deprecated. 
    +
    +
    +
    +
    +
     
     
     ImmutableHTableDescriptor
    -public ImmutableHTableDescriptor(HTableDescriptor desc)
    +public ImmutableHTableDescriptor(TableDescriptor desc)
     Deprecated. 
     
     
    @@ -269,7 +284,7 @@ extends 
     
     getDelegateeForModification
    -protected TableDescriptorBuilder.ModifyableTableDescriptor getDelegateeForModification()
    +protected TableDescriptorBuilder.ModifyableTableDescriptor getDelegateeForModification()
     Deprecated. 
     
     Overrides:
    
    
    

    [32/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
    index f4eee5a..02f2fc0 100644
    --- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
    +++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":9,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":9,"i24":10,"i25":10,"i26":9,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":9,"i36":10,"i37":9,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":9,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":9,"i61":10,"i62":9,"i63":9,"i64":9,"i65":9,"i66":10,"i67":9,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":9,"i84":10,"i85":9,"i86":9,"i87":9,"i88":9,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":9,"i25":9,"i26":10,"i27":10,"i28":10,"i29":9,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":9,"i40":10,"i41":9,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":9,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":9,"i67":10,"i68":9,"i69":9,"i70":9,"i71":9,"i72":10,"i73":9,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":9,"i91":10,"i92":9,"i93":9,"i94":9,"i95":9,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":10,"i110":10}
     ;
     var tabs = {65535:["t0","All Methods"],1:["t1","Static 
    Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public final class BackupSystemTable
    +public final class BackupSystemTable
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
     title="class or interface in java.lang">Object
     implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
     title="class or interface in java.io">Closeable
This class provides an API to access the backup system table
    @@ -238,54 +238,58 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     LOG 
     
     
    +private static byte[]
    +MERGE_OP_ROW 
    +
    +
     (package private) static byte[]
     META_FAMILY
     Stores other meta
     
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     NULL 
     
    -
    +
     (package private) static byte[]
     PATH_COL 
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     RS_LOG_TS_PREFIX 
     
    -
    +
     (package private) static byte[]
     SESSIONS_FAMILY
     Stores backup sessions (contexts)
     
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     SET_KEY_PREFIX 
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     START_CODE_ROW 
     
    -
    +
     (package private) static byte[]
     STATE_COL 
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     TABLE_RS_LOG_MAP_PREFIX 
     
    -
    +
     private TableName
     tableName 
     
    -
    +
     (package private) static byte[]
     TBL_COL 
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String
     WALS_PREFIX 
     
    @@ -385,68 +389,76 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     private Delete
    +createDeleteForBackupMergeOperation() 
    +
    +
    +private Delete
     createDeleteForBackupSet(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or

    [26/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Options.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Options.html 
    b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Options.html
    new file mode 100644
    index 000..f84a700
    --- /dev/null
    +++ b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Options.html
    @@ -0,0 +1,291 @@
    +http://www.w3.org/TR/html4/loose.dtd";>
    +
    +
    +
    +
    +
    +Uses of Class org.apache.hadoop.hbase.ClusterStatus.Options (Apache 
    HBase 3.0.0-SNAPSHOT API)
    +
    +
    +
    +
    +
    +
    +
    +JavaScript is disabled on your browser.
    +
    +
    +
    +
    +
    +Skip navigation links
    +
    +
    +
    +
    +Overview
    +Package
    +Class
    +Use
    +Tree
    +Deprecated
    +Index
    +Help
    +
    +
    +
    +
    +Prev
    +Next
    +
    +
    +Frames
    +No Frames
    +
    +
    +All Classes
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Uses of 
    Classorg.apache.hadoop.hbase.ClusterStatus.Options
    +
    +
    +
    +
    +
    +Packages that use ClusterStatus.Options 
    +
    +Package
    +Description
    +
    +
    +
    +org.apache.hadoop.hbase
    + 
    +
    +
    +org.apache.hadoop.hbase.client
    +
    +Provides HBase Client
    +
    +
    +
    +org.apache.hadoop.hbase.master
    + 
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Uses of ClusterStatus.Options in org.apache.hadoop.hbase
    +
    +Methods in org.apache.hadoop.hbase
     that return ClusterStatus.Options 
    +
    +Modifier and Type
    +Method and Description
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeBackupMasters()
    +Filter out backup masters info.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeBalancerOn()
    +Filter out balancer on info.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeClusterId()
    +Filter out cluster id.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeDeadServers()
    +Filter out dead servers info.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeHBaseVersion()
+Filter out hbase version.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeLiveServers()
    +Filter out live servers.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeMaster()
    +Filter out master info.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeMasterCoprocessors()
    +Filter out master's coprocessors info.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.excludeRegionState()
    +Filter out region state.
    +
    +
    +
    +static ClusterStatus.Options
    +ClusterStatus.Options.getDefaultOptions()
    +Include all information about a ClusterStatus.
    +
    +
    +
    +ClusterStatus.Options
    +ClusterStatus.Options.reset()
+For convenient reuse of an Options instance, reset the options to their 
defaults.
    +
    +
    +
    +
    +
    +
    +
    +
    +Uses of ClusterStatus.Options in org.apache.hadoop.hbase.client
    +
    +Methods in org.apache.hadoop.hbase.client
     with parameters of type ClusterStatus.Options 
    +
    +Modifier and Type
    +Method and Description
    +
    +
    +
    +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
    +AsyncHBaseAdmin.getClusterStatus(ClusterStatus.Options options) 
    +
    +
    +ClusterStatus
    +Admin.getClusterStatus(ClusterStatus.Options options)
    +Get cluster status with options to filter out unwanted 
    status.
    +
    +
    +
    +ClusterStatus
    +HBaseAdmin.getClusterStatus(ClusterStatus.Options options) 
    +
    +
    +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
    +AsyncAdmin.getClusterStatus(ClusterStatus.Options options) 
    +
    +
    +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
    +RawAsyncHBaseAdmin.getClusterStatus(ClusterStatus.Options options) 
    +
    +
    +
    +
    +
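+A minimal sketch of the asynchronous variant listed above:
+AsyncAdmin.getClusterStatus(ClusterStatus.Options) returns a CompletableFuture,
+so the result is consumed in a callback. How the AsyncAdmin instance is obtained
+is assumed and elided, and getMaster() on ClusterStatus is an assumption:
+
+  import java.util.concurrent.CompletableFuture;
+  import org.apache.hadoop.hbase.ClusterStatus;
+  import org.apache.hadoop.hbase.client.AsyncAdmin;
+
+  public class AsyncClusterStatusSketch {
+    static CompletableFuture<Void> logMaster(AsyncAdmin asyncAdmin) {
+      ClusterStatus.Options options = ClusterStatus.Options.getDefaultOptions()
+          .excludeDeadServers()          // skip dead-server details
+          .excludeMasterCoprocessors();  // skip the coprocessor list
+      // Non-blocking: the status is handled when the future completes.
+      return asyncAdmin.getClusterStatus(options)
+          .thenAccept(status -> System.out.println("Active master: " + status.getMaster()));
+    }
+  }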
    +
    +
    +Uses of ClusterStatus.Options in org.apache.hadoop.hbase.master
    +
    +Methods in org.apache.hadoop.hbase.master
     with parameters of type ClusterStatus.Options 
    +
    +Modifier and Type
    +Method and Description
    +
    +
    +
    +ClusterStatus
    +HMaster.getClusterStatus(ClusterStatus.Options options) 
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Skip navigation links
    +
    +
    +
    +
    +Overview
    +Package
    +Class
    +Use
    +Tree
    +Deprecated
    +Index
    +Help
    +
    +
    +
    +
    +Prev
    +Next
    +
    +
    +Frames
    +No Frames
    +
    +
    +All Classes
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Copyright © 2007–2017 https://www.apache.org/"

    [21/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    index eae7b36..8935670 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":38,"i10":18,"i11":6,"i12":18,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":18,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":18,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":18,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":6,"i49":6,"i50":18,"i51":6,"i52":18,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":18,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":18,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":18,"i82":6,"i83":6,"i84":18,"i85":6,"i86":18,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":18,"i113":18,"i114":6,"i115":6,"i116":18,"i117":6,"i118":6,"
     i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6};
    +var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":38,"i10":18,"i11":6,"i12":18,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":18,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":18,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":18,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":18,"i48":6,"i49":6,"i50":6,"i51":18,"i52":6,"i53":18,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":18,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":18,"i77":6,"i78":18,"i79":6,"i80":18,"i81":6,"i82":18,"i83":6,"i84":6,"i85":18,"i86":6,"i87":18,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":18,"i114":18,"i115":6,"i116":6,"i117":18,"i118":6,"
     i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
    Methods"],32:["t6","Deprecated Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -106,7 +106,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Public
    -public interface AsyncAdmin
    +public interface AsyncAdmin
     The asynchronous administrative API for HBase.
      
      This feature is still under development, so marked as IA.Private. Will change 
    to public when
    @@ -402,127 +402,131 @@ public interface getClusterStatus() 
     
     
    +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
    +getClusterStatus(ClusterStatus.Options options) 
    +
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
     getCompactionState(TableName tableName)
     Get the current compaction state of a table.
     
     
    -
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFuture
     getCompactionStateForRegion(byte[] regionName)
     Get the current compaction state of region.
     
     
    -
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFutureOptionalLong>>
     getLastMajorCompactionTimestamp(TableName tableName)
     Get the timestamp of the last major compaction for the 
    passed table.
     
     
    -
    +
     http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
     title="class or interface in java.util.concurrent">CompletableFutureOptional
    

    [33/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html 
    b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
    index ec5a05d..04d6eb3 100644
    --- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
    +++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
    @@ -590,7 +590,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     getOngoingBackupId
    -private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOngoingBackupId()
    +private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
     title="class or interface in java.lang">String getOngoingBackupId()
    throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
Check if there is any ongoing backup. Currently, we only rely on 
checking the status in the backup system
 table. We need to consider handling the case of orphan records in the 
future. Otherwise, all
    @@ -609,7 +609,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     initialize
    -public void initialize()
    +public void initialize()
     throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     Start the backup manager service.
     
    @@ -624,7 +624,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     setBackupInfo
    -public void setBackupInfo(BackupInfo backupInfo)
    +public void setBackupInfo(BackupInfo backupInfo)
     
     
     
    @@ -633,7 +633,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     getAncestors
    -public http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
     title="class or interface in java.util">ArrayList getAncestors(BackupInfo backupInfo)
    +public http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
     title="class or interface in java.util">ArrayList getAncestors(BackupInfo backupInfo)
    throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException,
       BackupException
     Get direct ancestors of the current backup.
    @@ -654,7 +654,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     getAncestors
    -public http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
     title="class or interface in java.util">ArrayList getAncestors(BackupInfo backupInfo,
    +public http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
     title="class or interface in java.util">ArrayList getAncestors(BackupInfo backupInfo,
       TableName table)
    throws BackupException,
       http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
    @@ -677,7 +677,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     updateBackupInfo
    -public void updateBackupInfo(BackupInfo context)
    +public void updateBackupInfo(BackupInfo context)
       throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     Updates status (state) of a backup session in a persistent 
    store
     
    @@ -694,7 +694,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     startBackupSession
    -public void startBackupSession()
    +public void startBackupSession()
     throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     Starts new backup session
     
    @@ -709,7 +709,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     finishBackupSession
    -public void finishBackupSession()
    +public void finishBackupSession()
      throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
     title="class or interface in java.io">IOException
     Finishes active backup session
     
    @@ -724,7 +724,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
     
     
     readBackupStartCode
    -public http://docs.oracle.com/javase/8/docs/api/java/

    [43/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/src-html/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    --
    diff --git 
    a/apidocs/src-html/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
     
    b/apidocs/src-html/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    deleted file mode 100644
    index fc97453..000
    --- 
    a/apidocs/src-html/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
    +++ /dev/null
    @@ -1,130 +0,0 @@
    -http://www.w3.org/TR/html4/loose.dtd";>
    -
    -
    -Source code
    -
    -
    -
    -
    -001/**
    -002 *
    -003 * Licensed to the Apache Software 
    Foundation (ASF) under one
    -004 * or more contributor license 
    agreements.  See the NOTICE file
    -005 * distributed with this work for 
    additional information
    -006 * regarding copyright ownership.  The 
    ASF licenses this file
    -007 * to you under the Apache License, 
    Version 2.0 (the
    -008 * "License"); you may not use this file 
    except in compliance
    -009 * with the License.  You may obtain a 
    copy of the License at
    -010 *
    -011 * 
    http://www.apache.org/licenses/LICENSE-2.0
    -012 *
    -013 * Unless required by applicable law or 
    agreed to in writing, software
    -014 * distributed under the License is 
    distributed on an "AS IS" BASIS,
    -015 * WITHOUT WARRANTIES OR CONDITIONS OF 
    ANY KIND, either express or implied.
    -016 * See the License for the specific 
    language governing permissions and
    -017 * limitations under the License.
    -018 */
    -019package org.apache.hadoop.hbase.client;
    -020
    -021import 
    org.apache.hadoop.hbase.classification.InterfaceAudience;
    -022import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;
    -023import 
    org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor;
    -024import 
    org.apache.hadoop.hbase.HColumnDescriptor;
    -025import 
    org.apache.hadoop.hbase.HTableDescriptor;
    -026
    -027/**
    -028 * Read-only table descriptor.
    -029 */
    -030@Deprecated // deprecated for hbase 2.0, 
    remove for hbase 3.0. see HTableDescriptor.
    -031@InterfaceAudience.Public
    -032public class ImmutableHTableDescriptor 
    extends HTableDescriptor {
    -033
    -034  @Override
    -035  protected HColumnDescriptor 
    toHColumnDescriptor(ColumnFamilyDescriptor desc) {
    -036if (desc == null) {
    -037  return null;
    -038} else if (desc instanceof 
    ModifyableColumnFamilyDescriptor) {
    -039  return new 
    ImmutableHColumnDescriptor((ModifyableColumnFamilyDescriptor) desc);
    -040} else if (desc instanceof 
    HColumnDescriptor) {
    -041  return new 
    ImmutableHColumnDescriptor((HColumnDescriptor) desc);
    -042} else {
    -043  return new 
    ImmutableHColumnDescriptor(new ModifyableColumnFamilyDescriptor(desc));
    -044}
    -045  }
    -046  /*
    -047   * Create an unmodifyable copy of an 
    HTableDescriptor
    -048   * @param desc
    -049   */
    -050  public ImmutableHTableDescriptor(final 
    HTableDescriptor desc) {
    -051super(desc, false);
    -052  }
    -053
    -054  @Override
    -055  protected ModifyableTableDescriptor 
    getDelegateeForModification() {
    -056throw new 
    UnsupportedOperationException("HTableDescriptor is read-only");
    -057  }
    -058}
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
    --
    diff --git 
    a/apidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html 
    b/apidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
    index dbd0de7..98d1f72 100644
    --- 
    a/apidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
    +++ 
    b/apidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
    @@ -27,8 +27,8 @@
     019
     020import 
    org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
     021
    -022import io.netty.channel.Channel;
    -023import io.netty.channel.EventLoopGroup;
    +022import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
    +023import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
     024
     025import java.util.HashMap;
     026import java.util.Map;
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
    --
    diff --git 
    a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html 
    b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
    index 7663309..2601f00 100644
    --- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
    +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
    @@ -800,7 +800,7 @@
     792  prefixTreeCodecClass, //  
    hbase-prefix-tree (if null will be skipped)
     793  // pull necessary dependencies
     794  

    [23/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
     
    b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
    index 9459d48..f11508b 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
    @@ -1199,73 +1199,79 @@ service.
     
     
     
    +interface 
    +BackupMergeJob
    +Backup merge operation job interface.
    +
    +
    +
     class 
     BackupRequest
     POJO class for backup request
     
     
    -
    +
     interface 
     BackupRestoreConstants
     BackupRestoreConstants holds a bunch of HBase Backup and 
    Restore constants
     
     
    -
    +
     class 
     BackupRestoreFactory
     Factory implementation for backup/restore related jobs
     
     
    -
    +
     class 
     BackupTableInfo
     Backup related information encapsulated for a table.
     
     
    -
    +
     class 
     BackupType 
     
    -
    +
     class 
     FailedArchiveException
     Exception indicating that some files in the requested set 
    could not be archived.
     
     
    -
    +
     class 
     HBackupFileSystem
View to an on-disk Backup Image FileSystem. Provides the set 
    of methods necessary to interact with
      the on-disk Backup Image data.
     
     
    -
    +
     class 
     HFileArchiver
     Utility class to handle the removal of HFiles (or the 
    respective StoreFiles)
      for a HRegion from the FileSystem.
     
     
    -
    +
     (package private) class 
     LogUtils
     Utility class for disabling Zk and client logging
     
     
    -
    +
     class 
     RestoreDriver
     Command-line entry point for restore operation
     
     
    -
    +
     interface 
     RestoreJob
     Restore operation job interface Concrete implementation is 
    provided by backup provider, see
      BackupRestoreFactory
     
     
    -
    +
     class 
     RestoreRequest
     POJO class for restore request
    @@ -1401,25 +1407,31 @@ service.
     
     
     class 
    -HFileSplitterJob
    -A tool to split HFiles into new region boundaries as a 
    MapReduce job.
    +MapReduceBackupCopyJob
    +Map-Reduce implementation of BackupCopyJob.
     
     
     
     class 
    -MapReduceBackupCopyJob
    -Map-Reduce implementation of BackupCopyJob.
    +MapReduceBackupMergeJob
    +MapReduce implementation of BackupMergeJob
+ Must be initialized with the configuration of a backup destination cluster
     
     
     
     class 
    +MapReduceHFileSplitterJob
    +A tool to split HFiles into new region boundaries as a 
    MapReduce job.
    +
    +
    +
    +class 
     MapReduceRestoreJob
     MapReduce implementation of RestoreJob
     
    - For full backup restore, it runs HFileSplitterJob job 
    and creates
    + For backup restore, it runs MapReduceHFileSplitterJob
     job and creates
      HFiles which are aligned with a region boundaries of a table being
    - restored, for incremental backup restore it runs WALPlayer in
    - bulk load mode (creates HFiles from WAL edits).
    + restored.
     
     
     
[Generated javadoc diff (client package class summary), hunk @@ -1916,110 +1928,116 @@;
HTML table markup was lost in extraction, so only the recoverable row contents are kept.]

New row:
  class ImmutableHTableDescriptor -- Deprecated.

Rows with styling-only changes (descriptions unchanged):
  (package private) class MasterCoprocessorRpcChannelImpl -- The implementation of a
      master based coprocessor rpc channel.
  class MetaCache -- A cache implementation for region locations from meta.
  class MetricsConnection -- This class is for maintaining the various connection
      statistics and publishing them through the metrics interfaces.
  class MultiAction -- Container for Actions (i.e. ...)
  class MultiResponse -- A container for Result objects, grouped by regionName.
  (package private) class MultiServerCallable -- Callable that handles the multi method
      call going against a single regionserver; i.e. ...
  class NoncedRegionServerCallable -- Implementations make an rpc call against a
      RegionService via a protobuf Service.
  interface NonceGenerator -- NonceGenerator interface.
  (package private) class NoOpRetryableCallerInterceptor -- Class that acts as a
      NoOpInterceptor.
  (package private) class NoOpRetryingInterceptorContext
  class PackagePrivateFieldAccessor -- A helper class used to access the package private
      field in o.a.h.h.client package.
  class PerClientRandomNonceGenerator -- NonceGenerator implementation that uses client
      ID hash + random int as nonce group, and random numbers as nonces.
  (package private) class PreemptiveFastFailInterceptor -- The concrete
      RetryingCallerInterceptor class that implements the preemptive fast fail feature.
  class QuotaStatusCalls -- Client class to wrap RPCs to HBase servers for space quota
      status information.
  class RawAsyncHBaseAdmin -- The implementation of AsyncAdmin.
  (package private) class RawAsyncTableImpl -- The implementation of RawAsyncTable.
  class RegionAdminServiceCallable -- Similar to RegionServerCallable but for the
      AdminService interface.
  (package private) class RegionCoprocessorRpcChannel -- Provides clients with an RPC
      connection [text truncated in the original]

    [15/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html 
    b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
    index eacb7b5..b516ead 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
@@ -18,7 +18,7 @@
[Generated javadoc method-index script in HBaseAdmin.html: the "var methods = {...}"
JavaScript array was regenerated with shifted entry indices. The old and new array
contents (several hundred "iN":code pairs, truncated in the original extraction) are
omitted here as generated page residue.]

    [01/51] [partial] hbase-site git commit: Published site at .

    Repository: hbase-site
    Updated Branches:
      refs/heads/asf-site c70834a52 -> 8bae1c8a1
    
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bae1c8a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html 
    b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    index 5ef3b38..e93b0e3 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
[Generated javadoc diff for MasterRpcServices.html: the regenerated method entries below
differ only at the HTML-markup level, which was lost when the page was extracted to text,
so the old and new signature lines read identically. Request/response types share the
prefix org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos; RpcController and
ServiceException come from org.apache.hadoop.hbase.shaded.com.google.protobuf.]

getCompletedSnapshots(RpcController controller, GetCompletedSnapshotsRequest request)
    returns GetCompletedSnapshotsResponse, throws ServiceException
    "List the currently available/stored snapshots. Any in-progress snapshots are ignored"

getNamespaceDescriptor(RpcController controller, GetNamespaceDescriptorRequest request)
    returns GetNamespaceDescriptorResponse, throws ServiceException

getSchemaAlterStatus(RpcController controller, GetSchemaAlterStatusRequest req)
    returns GetSchemaAlterStatusResponse, throws ServiceException
    "Get the number of regions of the table that have been updated by the alter."

getTableDescriptors(RpcController c, GetTableDescriptorsRequest req)
    returns GetTableDescriptorsResponse, throws ServiceException
    "Get list of TableDescriptors for requested tables."

getTableNames(RpcController controller, ...
    returns GetTableNamesResponse [diff truncated in the original]

    hbase git commit: HBASE-18579 Enable core dump by default for docker

    Repository: hbase
    Updated Branches:
      refs/heads/HBASE-14850 aff0336ec -> 6e0f6df79
    
    
    HBASE-18579 Enable core dump by default for docker
    
    
    Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
    Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e0f6df7
    Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e0f6df7
    Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e0f6df7
    
    Branch: refs/heads/HBASE-14850
    Commit: 6e0f6df7997817d94ba56d4920c90c6b5eedf2ea
    Parents: aff0336
    Author: tedyu 
    Authored: Mon Aug 14 07:06:23 2017 -0700
    Committer: tedyu 
    Committed: Mon Aug 14 07:06:23 2017 -0700
    
    --
     hbase-native-client/bin/start-docker.sh | 2 +-
     hbase-native-client/docker-files/Dockerfile | 4 +++-
     2 files changed, 4 insertions(+), 2 deletions(-)
    --
    
    
    http://git-wip-us.apache.org/repos/asf/hbase/blob/6e0f6df7/hbase-native-client/bin/start-docker.sh
    --
    diff --git a/hbase-native-client/bin/start-docker.sh 
    b/hbase-native-client/bin/start-docker.sh
    index 53325c1..391238d 100755
    --- a/hbase-native-client/bin/start-docker.sh
    +++ b/hbase-native-client/bin/start-docker.sh
    @@ -55,7 +55,7 @@ fi;
     docker build -t hbase_native -f docker-files/Dockerfile .
     
     # After the image is built run the thing
    -docker run -h="securecluster" -p 16050:16050/tcp \
    +docker run --privileged=true -h="securecluster" -p 16050:16050/tcp \
      -v ${BASE_DIR}/..:/usr/src/hbase \
    -v ~/.m2:/root/.m2 \
      -it hbase_native /bin/bash
    
    http://git-wip-us.apache.org/repos/asf/hbase/blob/6e0f6df7/hbase-native-client/docker-files/Dockerfile
    --
    diff --git a/hbase-native-client/docker-files/Dockerfile 
    b/hbase-native-client/docker-files/Dockerfile
    index efd9a9d..ac3cb47 100644
    --- a/hbase-native-client/docker-files/Dockerfile
    +++ b/hbase-native-client/docker-files/Dockerfile
    @@ -117,6 +117,8 @@ RUN cd /usr/src/ && \
       ctest && \
       make install
     
    -ENTRYPOINT /usr/sbin/krb5kdc -P /var/run/krb5kdc.pid && /bin/bash
    +RUN echo "enabled=1" >> /etc/default/apport
    +
    +ENTRYPOINT /usr/sbin/krb5kdc -P /var/run/krb5kdc.pid && echo 
    "/tmp/core.%h.%e.%t" >> /proc/sys/kernel/core_pattern && sysctl -p && ulimit -c 
    unlimited && /bin/bash
     
     WORKDIR /usr/src/hbase/hbase-native-client
    
    
    

    hbase git commit: HBASE-17803 PE always re-creates table when we specify the split policy

    Repository: hbase
    Updated Branches:
      refs/heads/branch-1.3 18726b370 -> f62069897
    
    
    HBASE-17803 PE always re-creates table when we specify the split policy
    
    
    Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
    Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f6206989
    Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f6206989
    Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f6206989
    
    Branch: refs/heads/branch-1.3
    Commit: f62069897b64c5da7fc93d02d9bf0cd7b9fcd4bf
    Parents: 18726b3
    Author: Chia-Ping Tsai 
    Authored: Mon Aug 14 15:10:09 2017 +0800
    Committer: Chia-Ping Tsai 
    Committed: Mon Aug 14 15:10:09 2017 +0800
    
    --
     .../test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java  | 3 ++-
     1 file changed, 2 insertions(+), 1 deletion(-)
    --
    
    
    http://git-wip-us.apache.org/repos/asf/hbase/blob/f6206989/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    --
    diff --git 
    a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
    b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    index 76da965..b68b61a 100644
    --- 
    a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    +++ 
    b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    @@ -44,6 +44,7 @@ import java.util.concurrent.Future;
     import com.google.common.base.Objects;
     import com.google.common.util.concurrent.ThreadFactoryBuilder;
     
    +import org.apache.commons.lang.StringUtils;
     import org.apache.commons.logging.Log;
     import org.apache.commons.logging.LogFactory;
     import org.apache.hadoop.conf.Configuration;
    @@ -325,7 +326,7 @@ public class PerformanceEvaluation extends Configured 
    implements Tool {
     // recreate the table when user has requested presplit or when existing
     // {RegionSplitPolicy,replica count} does not match requested.
     if ((exists && opts.presplitRegions != DEFAULT_OPTS.presplitRegions)
    -  || (!isReadCmd && desc != null && desc.getRegionSplitPolicyClassName() 
    != opts.splitPolicy)
    +  || (!isReadCmd && desc != null && 
    !StringUtils.equals(desc.getRegionSplitPolicyClassName(), opts.splitPolicy))
       || (!isReadCmd && desc != null && desc.getRegionReplication() != 
    opts.replicas)) {
       needsDelete = true;
       // wait, why did it delete my table?!?
    
    
    

    hbase git commit: HBASE-17803 PE always re-creates table when we specify the split policy

    Repository: hbase
    Updated Branches:
      refs/heads/branch-1.2 b9a57f455 -> e4f65bf7b
    
    
    HBASE-17803 PE always re-creates table when we specify the split policy
    
    
    Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
    Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e4f65bf7
    Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e4f65bf7
    Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e4f65bf7
    
    Branch: refs/heads/branch-1.2
    Commit: e4f65bf7b5a89b6f970b653ea48dfc758057b1d7
    Parents: b9a57f4
    Author: Chia-Ping Tsai 
    Authored: Mon Aug 14 15:09:24 2017 +0800
    Committer: Chia-Ping Tsai 
    Committed: Mon Aug 14 15:09:24 2017 +0800
    
    --
     .../test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java  | 3 ++-
     1 file changed, 2 insertions(+), 1 deletion(-)
    --
    
    
    http://git-wip-us.apache.org/repos/asf/hbase/blob/e4f65bf7/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    --
    diff --git 
    a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
    b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    index ae058fb..0f12cc7 100644
    --- 
    a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    +++ 
    b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    @@ -44,6 +44,7 @@ import java.util.concurrent.Future;
     import com.google.common.base.Objects;
     import com.google.common.util.concurrent.ThreadFactoryBuilder;
     
    +import org.apache.commons.lang.StringUtils;
     import org.apache.commons.logging.Log;
     import org.apache.commons.logging.LogFactory;
     import org.apache.hadoop.conf.Configuration;
    @@ -325,7 +326,7 @@ public class PerformanceEvaluation extends Configured 
    implements Tool {
     // recreate the table when user has requested presplit or when existing
     // {RegionSplitPolicy,replica count} does not match requested.
     if ((exists && opts.presplitRegions != DEFAULT_OPTS.presplitRegions)
    -  || (!isReadCmd && desc != null && desc.getRegionSplitPolicyClassName() 
    != opts.splitPolicy)
    +  || (!isReadCmd && desc != null && 
    !StringUtils.equals(desc.getRegionSplitPolicyClassName(), opts.splitPolicy))
       || (!isReadCmd && desc != null && desc.getRegionReplication() != 
    opts.replicas)) {
       needsDelete = true;
       // wait, why did it delete my table?!?
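
For readers skimming the two HBASE-17803 cherry-picks above: the pre-fix condition compared
the existing table's split-policy class name to the requested one with "!=", which tests
object identity rather than string value, so two equal names held in different String
objects still registered as different and PerformanceEvaluation dropped and re-created the
table whenever a split policy was specified. The commons-lang StringUtils.equals call in
the fix compares values and tolerates a null on either side. The class below is a minimal,
hypothetical sketch (not part of the patch; the class name is invented for illustration)
assuming only commons-lang 2.x on the classpath, the same dependency the patch imports.

    // Standalone illustration of the comparison bug fixed by HBASE-17803.
    import org.apache.commons.lang.StringUtils;

    public class SplitPolicyCompareDemo {
      public static void main(String[] args) {
        // A policy class name as it might be read back from an existing table descriptor.
        String existing =
            new String("org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy");
        // The same value as supplied on the command line.
        String requested =
            "org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy";

        // Reference comparison: two distinct String objects, so this prints true even
        // though the values are identical -- the condition that triggered the re-create.
        System.out.println("'!=' says they differ:          " + (existing != requested));

        // Value comparison, null-safe: equal values (or two nulls) compare as equal.
        System.out.println("StringUtils.equals(a, b):       " + StringUtils.equals(existing, requested));
        System.out.println("StringUtils.equals(null, null): " + StringUtils.equals(null, null));
      }
    }

Running the sketch prints true, true, true: the "!=" check misfires on equal values, while
the value comparison used in the patch does not, and it also handles a null on either side
(for example, when the descriptor has no explicit split policy set).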