hbase git commit: HBASE-18343 Track the remaining unimplemented methods for async admin

2017-07-10 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2 7acb226eb -> 8a8e299ee


HBASE-18343 Track the remaining unimplemented methods for async admin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8a8e299e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8a8e299e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8a8e299e

Branch: refs/heads/branch-2
Commit: 8a8e299eee10309268679ea47a0f5bc70d27dc6b
Parents: 7acb226
Author: Guanghao Zhang 
Authored: Mon Jul 10 13:39:44 2017 +0800
Committer: Guanghao Zhang 
Committed: Tue Jul 11 14:05:02 2017 +0800

--
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  61 +++
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|  43 
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 106 +++
 .../hbase/client/TestAsyncClusterAdminApi.java  |  90 
 .../hbase/client/TestAsyncRegionAdminApi.java   |  78 ++
 5 files changed, 378 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8a8e299e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 9538a48..7d904b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -21,6 +21,7 @@ import java.util.List;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.regex.Pattern;
 
@@ -32,9 +33,11 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
 import org.apache.hadoop.hbase.client.replication.TableCFs;
+import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 import org.apache.hadoop.hbase.util.Pair;
@@ -405,6 +408,34 @@ public interface AsyncAdmin {
   CompletableFuture majorCompactRegionServer(ServerName serverName);
 
   /**
+   * Turn the Merge switch on or off.
+   * @param on
+   * @return Previous switch value wrapped by a {@link CompletableFuture}
+   */
+  CompletableFuture setMergeOn(boolean on);
+
+  /**
+   * Query the current state of the Merge switch.
+   * @return true if the switch is on, false otherwise. The return value will 
be wrapped by a
+   * {@link CompletableFuture}
+   */
+  CompletableFuture isMergeOn();
+
+  /**
+   * Turn the Split switch on or off.
+   * @param on
+   * @return Previous switch value wrapped by a {@link CompletableFuture}
+   */
+  CompletableFuture setSplitOn(boolean on);
+
+  /**
+   * Query the current state of the Split switch.
+   * @return true if the switch is on, false otherwise. The return value will 
be wrapped by a
+   * {@link CompletableFuture}
+   */
+  CompletableFuture isSplitOn();
+
+  /**
* Merge two regions.
* @param nameOfRegionA encoded or full name of region a
* @param nameOfRegionB encoded or full name of region b
@@ -771,6 +802,12 @@ public interface AsyncAdmin {
   CompletableFuture> listProcedures();
 
   /**
+   * List procedure locks.
+   * @return lock list wrapped by {@link CompletableFuture}
+   */
+  CompletableFuture> listProcedureLocks();
+
+  /**
* Mark a region server as draining to prevent additional regions from 
getting assigned to it.
* @param servers
*/
@@ -852,6 +889,24 @@ public interface AsyncAdmin {
   CompletableFuture updateConfiguration();
 
   /**
+   * Roll the log writer. I.e. for filesystem based write ahead logs, start 
writing to a new file.
+   * 
+   * When the returned CompletableFuture is done, it only means the 
rollWALWriter request was sent
+   * to the region server and may need some time to finish the rollWALWriter 
operation. As a side
+   * effect of this call, the named region server may schedule store flushes 
at the request of the
+   * wal.
+   * @param serverName The servername of the region server.
+   */
+  CompletableFuture rollWALWriter(ServerName serverName);
+
+  /**
+   * Clear compacting queues on a region server.
+   * @param serverName
+   * @param queues the set of queue name
+   */
+  CompletableFuture clearCompactionQueues(ServerName serverName, Set queues);

hbase git commit: HBASE-18343 Track the remaining unimplemented methods for async admin

2017-07-10 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master f8e892d7a -> 1978b78cd


HBASE-18343 Track the remaining unimplemented methods for async admin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1978b78c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1978b78c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1978b78c

Branch: refs/heads/master
Commit: 1978b78cdf8b31fc627937a3ef9fdd0010fac08c
Parents: f8e892d
Author: Guanghao Zhang 
Authored: Mon Jul 10 13:39:44 2017 +0800
Committer: Guanghao Zhang 
Committed: Tue Jul 11 14:01:56 2017 +0800

--
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  61 +++
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|  43 
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 106 +++
 .../hbase/client/TestAsyncClusterAdminApi.java  |  90 
 .../hbase/client/TestAsyncRegionAdminApi.java   |  78 ++
 5 files changed, 378 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1978b78c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 9538a48..7d904b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -21,6 +21,7 @@ import java.util.List;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.regex.Pattern;
 
@@ -32,9 +33,11 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
 import org.apache.hadoop.hbase.client.replication.TableCFs;
+import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 import org.apache.hadoop.hbase.util.Pair;
@@ -405,6 +408,34 @@ public interface AsyncAdmin {
   CompletableFuture majorCompactRegionServer(ServerName serverName);
 
   /**
+   * Turn the Merge switch on or off.
+   * @param on
+   * @return Previous switch value wrapped by a {@link CompletableFuture}
+   */
+  CompletableFuture setMergeOn(boolean on);
+
+  /**
+   * Query the current state of the Merge switch.
+   * @return true if the switch is on, false otherwise. The return value will 
be wrapped by a
+   * {@link CompletableFuture}
+   */
+  CompletableFuture isMergeOn();
+
+  /**
+   * Turn the Split switch on or off.
+   * @param on
+   * @return Previous switch value wrapped by a {@link CompletableFuture}
+   */
+  CompletableFuture setSplitOn(boolean on);
+
+  /**
+   * Query the current state of the Split switch.
+   * @return true if the switch is on, false otherwise. The return value will 
be wrapped by a
+   * {@link CompletableFuture}
+   */
+  CompletableFuture isSplitOn();
+
+  /**
* Merge two regions.
* @param nameOfRegionA encoded or full name of region a
* @param nameOfRegionB encoded or full name of region b
@@ -771,6 +802,12 @@ public interface AsyncAdmin {
   CompletableFuture> listProcedures();
 
   /**
+   * List procedure locks.
+   * @return lock list wrapped by {@link CompletableFuture}
+   */
+  CompletableFuture> listProcedureLocks();
+
+  /**
* Mark a region server as draining to prevent additional regions from 
getting assigned to it.
* @param servers
*/
@@ -852,6 +889,24 @@ public interface AsyncAdmin {
   CompletableFuture updateConfiguration();
 
   /**
+   * Roll the log writer. I.e. for filesystem based write ahead logs, start 
writing to a new file.
+   * 
+   * When the returned CompletableFuture is done, it only means the 
rollWALWriter request was sent
+   * to the region server and may need some time to finish the rollWALWriter 
operation. As a side
+   * effect of this call, the named region server may schedule store flushes 
at the request of the
+   * wal.
+   * @param serverName The servername of the region server.
+   */
+  CompletableFuture rollWALWriter(ServerName serverName);
+
+  /**
+   * Clear compacting queues on a region server.
+   * @param serverName
+   * @param queues the set of queue name
+   */
+  CompletableFuture clearCompactionQueues(ServerName serverName, Set queues);

hbase git commit: HBASE-18348 The implementation of AsyncTableRegionLocator does not follow the javadoc

2017-07-10 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 43492d2d3 -> f8e892d7a


HBASE-18348 The implementation of AsyncTableRegionLocator does not follow the 
javadoc


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f8e892d7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f8e892d7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f8e892d7

Branch: refs/heads/master
Commit: f8e892d7aaa752db7a835f8969a676a9028c8448
Parents: 43492d2
Author: zhangduo 
Authored: Mon Jul 10 15:47:25 2017 +0800
Committer: zhangduo 
Committed: Tue Jul 11 11:43:35 2017 +0800

--
 .../hbase/client/AsyncMetaRegionLocator.java|  10 +-
 .../hbase/client/AsyncNonMetaRegionLocator.java |  24 +++--
 .../hadoop/hbase/client/AsyncRegionLocator.java |  21 ++--
 .../client/AsyncTableRegionLocatorImpl.java |   2 +-
 .../client/TestAsyncMetaRegionLocator.java  | 102 +++
 .../client/TestAsyncNonMetaRegionLocator.java   |  86 
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |   2 +-
 7 files changed, 203 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f8e892d7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
index 6e7dba7..600af34 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
@@ -45,11 +45,13 @@ class AsyncMetaRegionLocator {
 this.registry = registry;
   }
 
-  CompletableFuture getRegionLocation() {
+  CompletableFuture getRegionLocation(boolean reload) {
 for (;;) {
-  HRegionLocation metaRegionLocation = this.metaRegionLocation.get();
-  if (metaRegionLocation != null) {
-return CompletableFuture.completedFuture(metaRegionLocation);
+  if (!reload) {
+HRegionLocation metaRegionLocation = this.metaRegionLocation.get();
+if (metaRegionLocation != null) {
+  return CompletableFuture.completedFuture(metaRegionLocation);
+}
   }
   if (LOG.isTraceEnabled()) {
 LOG.trace("Meta region location cache is null, try fetching from 
registry.");

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8e892d7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
index 3dc9537..31f369c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
@@ -392,22 +392,26 @@ class AsyncNonMetaRegionLocator {
   // placed before it. Used for reverse scan. See the comment of
   // AsyncRegionLocator.getPreviousRegionLocation.
   private CompletableFuture 
getRegionLocationInternal(TableName tableName,
-  byte[] row, RegionLocateType locateType) {
+  byte[] row, RegionLocateType locateType, boolean reload) {
 // AFTER should be convert to CURRENT before calling this method
 assert !locateType.equals(RegionLocateType.AFTER);
 TableCache tableCache = getTableCache(tableName);
-HRegionLocation loc = locateInCache(tableCache, tableName, row, 
locateType);
-if (loc != null) {
-  return CompletableFuture.completedFuture(loc);
+if (!reload) {
+  HRegionLocation loc = locateInCache(tableCache, tableName, row, 
locateType);
+  if (loc != null) {
+return CompletableFuture.completedFuture(loc);
+  }
 }
 CompletableFuture future;
 LocateRequest req;
 boolean sendRequest = false;
 synchronized (tableCache) {
   // check again
-  loc = locateInCache(tableCache, tableName, row, locateType);
-  if (loc != null) {
-return CompletableFuture.completedFuture(loc);
+  if (!reload) {
+HRegionLocation loc = locateInCache(tableCache, tableName, row, 
locateType);
+if (loc != null) {
+  return CompletableFuture.completedFuture(loc);
+}
   }
   req = new LocateRequest(row, locateType);
   future = tableCache.allRequests.get(req);
@@ -427,16 +431,16 @@ class AsyncNonMetaRegionLocator {
   }
 
   CompletableFuture getRegionLocation(TableName tableName, 
byte[] row,
-  RegionLocateType locateType) {
+  Region

hbase git commit: HBASE-18348 The implementation of AsyncTableRegionLocator does not follow the javadoc

2017-07-10 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 6456c8bf8 -> 7acb226eb


HBASE-18348 The implementation of AsyncTableRegionLocator does not follow the 
javadoc


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7acb226e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7acb226e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7acb226e

Branch: refs/heads/branch-2
Commit: 7acb226eb9693fdecbcaf0293687137e0386823a
Parents: 6456c8b
Author: zhangduo 
Authored: Mon Jul 10 15:47:25 2017 +0800
Committer: zhangduo 
Committed: Tue Jul 11 11:44:40 2017 +0800

--
 .../hbase/client/AsyncMetaRegionLocator.java|  10 +-
 .../hbase/client/AsyncNonMetaRegionLocator.java |  24 +++--
 .../hadoop/hbase/client/AsyncRegionLocator.java |  21 ++--
 .../client/AsyncTableRegionLocatorImpl.java |   2 +-
 .../client/TestAsyncMetaRegionLocator.java  | 102 +++
 .../client/TestAsyncNonMetaRegionLocator.java   |  86 
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |   2 +-
 7 files changed, 203 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7acb226e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
index 6e7dba7..600af34 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
@@ -45,11 +45,13 @@ class AsyncMetaRegionLocator {
 this.registry = registry;
   }
 
-  CompletableFuture getRegionLocation() {
+  CompletableFuture getRegionLocation(boolean reload) {
 for (;;) {
-  HRegionLocation metaRegionLocation = this.metaRegionLocation.get();
-  if (metaRegionLocation != null) {
-return CompletableFuture.completedFuture(metaRegionLocation);
+  if (!reload) {
+HRegionLocation metaRegionLocation = this.metaRegionLocation.get();
+if (metaRegionLocation != null) {
+  return CompletableFuture.completedFuture(metaRegionLocation);
+}
   }
   if (LOG.isTraceEnabled()) {
 LOG.trace("Meta region location cache is null, try fetching from 
registry.");

http://git-wip-us.apache.org/repos/asf/hbase/blob/7acb226e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
index 3dc9537..31f369c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
@@ -392,22 +392,26 @@ class AsyncNonMetaRegionLocator {
   // placed before it. Used for reverse scan. See the comment of
   // AsyncRegionLocator.getPreviousRegionLocation.
   private CompletableFuture 
getRegionLocationInternal(TableName tableName,
-  byte[] row, RegionLocateType locateType) {
+  byte[] row, RegionLocateType locateType, boolean reload) {
 // AFTER should be convert to CURRENT before calling this method
 assert !locateType.equals(RegionLocateType.AFTER);
 TableCache tableCache = getTableCache(tableName);
-HRegionLocation loc = locateInCache(tableCache, tableName, row, 
locateType);
-if (loc != null) {
-  return CompletableFuture.completedFuture(loc);
+if (!reload) {
+  HRegionLocation loc = locateInCache(tableCache, tableName, row, 
locateType);
+  if (loc != null) {
+return CompletableFuture.completedFuture(loc);
+  }
 }
 CompletableFuture future;
 LocateRequest req;
 boolean sendRequest = false;
 synchronized (tableCache) {
   // check again
-  loc = locateInCache(tableCache, tableName, row, locateType);
-  if (loc != null) {
-return CompletableFuture.completedFuture(loc);
+  if (!reload) {
+HRegionLocation loc = locateInCache(tableCache, tableName, row, 
locateType);
+if (loc != null) {
+  return CompletableFuture.completedFuture(loc);
+}
   }
   req = new LocateRequest(row, locateType);
   future = tableCache.allRequests.get(req);
@@ -427,16 +431,16 @@ class AsyncNonMetaRegionLocator {
   }
 
   CompletableFuture getRegionLocation(TableName tableName, 
byte[] row,
-  RegionLocateType locateType) {
+  Re

hbase git commit: HBASE-18267 The result from the postAppend is ignored

2017-07-10 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 8a996e341 -> 56659f342


HBASE-18267 The result from the postAppend is ignored


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/56659f34
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/56659f34
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/56659f34

Branch: refs/heads/branch-1.2
Commit: 56659f3421dde0670bd9b757076a2d1b74140877
Parents: 8a996e3
Author: Chia-Ping Tsai 
Authored: Tue Jul 11 10:32:59 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Jul 11 10:32:59 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   2 +-
 .../regionserver/RegionCoprocessorHost.java |   7 +-
 .../hbase/client/TestResultFromCoprocessor.java | 128 +++
 3 files changed, 133 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/56659f34/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 914467c..0a3b14c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -520,7 +520,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 endNonceOperation(m, nonceGroup, success);
   }
   if (region.getCoprocessorHost() != null) {
-region.getCoprocessorHost().postAppend(append, r);
+r = region.getCoprocessorHost().postAppend(append, r);
   }
 }
 if (regionServer.metricsRegionServer != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/56659f34/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 1ed866a..0a7eb67 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -1259,12 +1259,13 @@ public class RegionCoprocessorHost
* @param result the result returned by the append
* @throws IOException if an error occurred on the coprocessor
*/
-  public void postAppend(final Append append, final Result result) throws 
IOException {
-execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
+  public Result postAppend(final Append append, final Result result) throws 
IOException {
+return execOperationWithResult(result,
+coprocessors.isEmpty() ? null : new 
RegionOperationWithResult() {
   @Override
   public void call(RegionObserver oserver, 
ObserverContext ctx)
   throws IOException {
-oserver.postAppend(ctx, append, result);
+setResult(oserver.postAppend(ctx, append, result));
   }
 });
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/56659f34/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
new file mode 100644
index 000..79cf297
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
@@ -0,0 +1,128 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;

hbase git commit: HBASE-18267 The result from the postAppend is ignored

2017-07-10 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 29e0e7317 -> 0b31a7300


HBASE-18267 The result from the postAppend is ignored


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b31a730
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b31a730
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b31a730

Branch: refs/heads/branch-1.3
Commit: 0b31a730065ea4bfb6354646611049dda56350fa
Parents: 29e0e73
Author: Chia-Ping Tsai 
Authored: Tue Jul 11 10:32:21 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Jul 11 10:32:21 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   2 +-
 .../regionserver/RegionCoprocessorHost.java |   7 +-
 .../hbase/client/TestResultFromCoprocessor.java | 128 +++
 3 files changed, 133 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b31a730/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 47e50c7..b6fc5bd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -582,7 +582,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 }
   }
   if (region.getCoprocessorHost() != null) {
-region.getCoprocessorHost().postAppend(append, r);
+r = region.getCoprocessorHost().postAppend(append, r);
   }
 }
 if (regionServer.metricsRegionServer != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b31a730/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 1ed866a..0a7eb67 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -1259,12 +1259,13 @@ public class RegionCoprocessorHost
* @param result the result returned by the append
* @throws IOException if an error occurred on the coprocessor
*/
-  public void postAppend(final Append append, final Result result) throws 
IOException {
-execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
+  public Result postAppend(final Append append, final Result result) throws 
IOException {
+return execOperationWithResult(result,
+coprocessors.isEmpty() ? null : new 
RegionOperationWithResult() {
   @Override
   public void call(RegionObserver oserver, 
ObserverContext ctx)
   throws IOException {
-oserver.postAppend(ctx, append, result);
+setResult(oserver.postAppend(ctx, append, result));
   }
 });
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b31a730/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
new file mode 100644
index 000..79cf297
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
@@ -0,0 +1,128 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.Arrays;
+import 

hbase git commit: HBASE-18267 The result from the postAppend is ignored

2017-07-10 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 2256aedd3 -> 4d88c460f


HBASE-18267 The result from the postAppend is ignored


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4d88c460
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4d88c460
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4d88c460

Branch: refs/heads/branch-1.4
Commit: 4d88c460f43983920aa571ac86c46b43ccbbbaf6
Parents: 2256aed
Author: Chia-Ping Tsai 
Authored: Tue Jul 11 10:31:46 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Jul 11 10:31:46 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   2 +-
 .../regionserver/RegionCoprocessorHost.java |   7 +-
 .../hbase/client/TestResultFromCoprocessor.java | 128 +++
 3 files changed, 133 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4d88c460/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 68fb1fb..4071fed 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -593,7 +593,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 }
   }
   if (region.getCoprocessorHost() != null) {
-region.getCoprocessorHost().postAppend(append, r);
+r = region.getCoprocessorHost().postAppend(append, r);
   }
 }
 if (regionServer.metricsRegionServer != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/4d88c460/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 3ecd970..96855b8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -1277,12 +1277,13 @@ public class RegionCoprocessorHost
* @param result the result returned by the append
* @throws IOException if an error occurred on the coprocessor
*/
-  public void postAppend(final Append append, final Result result) throws 
IOException {
-execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
+  public Result postAppend(final Append append, final Result result) throws 
IOException {
+return execOperationWithResult(result,
+coprocessors.isEmpty() ? null : new 
RegionOperationWithResult() {
   @Override
   public void call(RegionObserver oserver, 
ObserverContext ctx)
   throws IOException {
-oserver.postAppend(ctx, append, result);
+setResult(oserver.postAppend(ctx, append, result));
   }
 });
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4d88c460/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
new file mode 100644
index 000..79cf297
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
@@ -0,0 +1,128 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.Arrays;
+import 

hbase git commit: HBASE-18267 The result from the postAppend is ignored

2017-07-10 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1 844596e09 -> 44651e52d


HBASE-18267 The result from the postAppend is ignored


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/44651e52
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/44651e52
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/44651e52

Branch: refs/heads/branch-1
Commit: 44651e52d831328802accd06bd5e93a6f3c15549
Parents: 844596e
Author: Chia-Ping Tsai 
Authored: Tue Jul 11 10:31:21 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Jul 11 10:31:21 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   2 +-
 .../regionserver/RegionCoprocessorHost.java |   7 +-
 .../hbase/client/TestResultFromCoprocessor.java | 128 +++
 3 files changed, 133 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/44651e52/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 68fb1fb..4071fed 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -593,7 +593,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 }
   }
   if (region.getCoprocessorHost() != null) {
-region.getCoprocessorHost().postAppend(append, r);
+r = region.getCoprocessorHost().postAppend(append, r);
   }
 }
 if (regionServer.metricsRegionServer != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/44651e52/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 3ecd970..96855b8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -1277,12 +1277,13 @@ public class RegionCoprocessorHost
* @param result the result returned by the append
* @throws IOException if an error occurred on the coprocessor
*/
-  public void postAppend(final Append append, final Result result) throws 
IOException {
-execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
+  public Result postAppend(final Append append, final Result result) throws 
IOException {
+return execOperationWithResult(result,
+coprocessors.isEmpty() ? null : new 
RegionOperationWithResult() {
   @Override
   public void call(RegionObserver oserver, 
ObserverContext ctx)
   throws IOException {
-oserver.postAppend(ctx, append, result);
+setResult(oserver.postAppend(ctx, append, result));
   }
 });
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/44651e52/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
new file mode 100644
index 000..79cf297
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
@@ -0,0 +1,128 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.Arrays;
+import stat

hbase git commit: HBASE-18267 The result from the postAppend is ignored

2017-07-10 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 f2e5727e1 -> 6456c8bf8


HBASE-18267 The result from the postAppend is ignored


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6456c8bf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6456c8bf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6456c8bf

Branch: refs/heads/branch-2
Commit: 6456c8bf84d5341908161be76a0457f89288cb6e
Parents: f2e5727
Author: Chia-Ping Tsai 
Authored: Tue Jul 11 10:30:42 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Jul 11 10:30:42 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   2 +-
 .../regionserver/RegionCoprocessorHost.java |   7 +-
 .../hbase/client/TestResultFromCoprocessor.java | 127 +++
 3 files changed, 132 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6456c8bf/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 4c0625a..37d2d22 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -691,7 +691,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 }
   }
   if (region.getCoprocessorHost() != null) {
-region.getCoprocessorHost().postAppend(append, r);
+r = region.getCoprocessorHost().postAppend(append, r);
   }
 }
 if (regionServer.metricsRegionServer != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/6456c8bf/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 4570cec..3f56a49 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -1226,12 +1226,13 @@ public class RegionCoprocessorHost
* @param result the result returned by the append
* @throws IOException if an error occurred on the coprocessor
*/
-  public void postAppend(final Append append, final Result result) throws 
IOException {
-execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
+  public Result postAppend(final Append append, final Result result) throws 
IOException {
+return execOperationWithResult(result,
+coprocessors.isEmpty() ? null : new 
RegionOperationWithResult() {
   @Override
   public void call(RegionObserver oserver, 
ObserverContext ctx)
   throws IOException {
-oserver.postAppend(ctx, append, result);
+setResult(oserver.postAppend(ctx, append, result));
   }
 });
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6456c8bf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
new file mode 100644
index 000..4a81cc2
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
@@ -0,0 +1,127 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.Arrays;
+import stat

hbase git commit: HBASE-18267 The result from the postAppend is ignored

2017-07-10 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 7d08c7810 -> 43492d2d3


HBASE-18267 The result from the postAppend is ignored


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/43492d2d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/43492d2d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/43492d2d

Branch: refs/heads/master
Commit: 43492d2d3b2f889273651a2f0159f5071e8ca9ff
Parents: 7d08c78
Author: Chia-Ping Tsai 
Authored: Tue Jul 11 10:30:06 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Jul 11 10:30:06 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   2 +-
 .../regionserver/RegionCoprocessorHost.java |   7 +-
 .../hbase/client/TestResultFromCoprocessor.java | 127 +++
 3 files changed, 132 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/43492d2d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 4c0625a..37d2d22 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -691,7 +691,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 }
   }
   if (region.getCoprocessorHost() != null) {
-region.getCoprocessorHost().postAppend(append, r);
+r = region.getCoprocessorHost().postAppend(append, r);
   }
 }
 if (regionServer.metricsRegionServer != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/43492d2d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 4570cec..3f56a49 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -1226,12 +1226,13 @@ public class RegionCoprocessorHost
* @param result the result returned by the append
* @throws IOException if an error occurred on the coprocessor
*/
-  public void postAppend(final Append append, final Result result) throws 
IOException {
-execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
+  public Result postAppend(final Append append, final Result result) throws 
IOException {
+return execOperationWithResult(result,
+coprocessors.isEmpty() ? null : new 
RegionOperationWithResult() {
   @Override
   public void call(RegionObserver oserver, 
ObserverContext ctx)
   throws IOException {
-oserver.postAppend(ctx, append, result);
+setResult(oserver.postAppend(ctx, append, result));
   }
 });
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/43492d2d/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
new file mode 100644
index 000..4a81cc2
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
@@ -0,0 +1,127 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.Arrays;
+import static j

[2/2] hbase git commit: Revert "HBASE-18216 [AMv2] Workaround for HBASE-18152, corrupt procedure WAL including ADDENDUM"

2017-07-10 Thread apurtell
Revert "HBASE-18216 [AMv2] Workaround for HBASE-18152, corrupt procedure WAL 
including ADDENDUM"

This reverts commit 8ce3f49f8034417734ef1a3e6c624a601a22ace9.
See HBASE-18340


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/844596e0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/844596e0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/844596e0

Branch: refs/heads/branch-1
Commit: 844596e09e7961f506ab2e8bb4aee5557e1b4a44
Parents: 5d710bb
Author: Andrew Purtell 
Authored: Mon Jul 10 19:06:26 2017 -0700
Committer: Andrew Purtell 
Committed: Mon Jul 10 19:25:09 2017 -0700

--
 .../store/wal/ProcedureWALFormatReader.java | 93 +---
 1 file changed, 24 insertions(+), 69 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/844596e0/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 8349611..281292d 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -43,7 +43,7 @@ public class ProcedureWALFormatReader {
   private static final Log LOG = 
LogFactory.getLog(ProcedureWALFormatReader.class);
 
   // 
==
-  //  We read the WALs in reverse order from the newest to the oldest.
+  //  We read the WALs in reverse order. from the newest to the oldest.
   //  We have different entry types:
   //   - INIT: Procedure submitted by the user (also known as 'root procedure')
   //   - INSERT: Children added to the procedure :[, ...]
@@ -52,8 +52,7 @@ public class ProcedureWALFormatReader {
   //
   // In the WAL we can find multiple times the same procedure as UPDATE or 
INSERT.
   // We read the WAL from top to bottom, so every time we find an entry of the
-  // same procedure, that will be the "latest" update (Caveat: with multiple 
threads writing
-  // the store, this assumption does not hold).
+  // same procedure, that will be the "latest" update.
   //
   // We keep two in-memory maps:
   //  - localProcedureMap: is the map containing the entries in the WAL we are 
processing
@@ -65,7 +64,7 @@ public class ProcedureWALFormatReader {
   //
   // The WAL is append-only so the last procedure in the WAL is the one that
   // was in execution at the time we crashed/closed the server.
-  // Given that, the procedure replay order can be inferred by the WAL order.
+  // given that, the procedure replay order can be inferred by the WAL order.
   //
   // Example:
   //WAL-2: [A, B, A, C, D]
@@ -78,7 +77,7 @@ public class ProcedureWALFormatReader {
   //WAL-2 localProcedureMap.replayOrder is [D, C, A, B]
   //WAL-1 localProcedureMap.replayOrder is [F, G]
   //
-  // Each time we reach the WAL-EOF, the "replayOrder" list is merged/appended 
in 'procedureMap'
+  // each time we reach the WAL-EOF, the "replayOrder" list is merged/appended 
in 'procedureMap'
   // so using the example above we end up with: [D, C, A, B] + [F, G] as 
replay order.
   //
   //  Fast Start: INIT/INSERT record and StackIDs
@@ -154,12 +153,12 @@ public class ProcedureWALFormatReader {
   log.setProcIds(localProcedureMap.getMinProcId(), 
localProcedureMap.getMaxProcId());
   procedureMap.mergeTail(localProcedureMap);
   //if (hasFastStartSupport) {
-  // TODO: Some procedure may be already runnables (see readInitEntry())
-  //   (we can also check the "update map" in the log trackers)
-  // --
-  //EntryIterator iter = procedureMap.fetchReady();
-  //if (iter != null) loader.load(iter);
-  // --
+// TODO: Some procedure may be already runnables (see readInitEntry())
+//   (we can also check the "update map" in the log trackers)
+// --
+//EntryIterator iter = procedureMap.fetchReady();
+//if (iter != null) loader.load(iter);
+// --
   //}
 }
   }
@@ -190,7 +189,7 @@ public class ProcedureWALFormatReader {
   }
 
   private void readInitEntry(final ProcedureWALEntry entry)
-  throws IOException {
+  throws IOException {
 assert entry.getProcedureCount() == 1 : "Expected only one 

[1/2] hbase git commit: Revert "HBASE-18216 [AMv2] Workaround for HBASE-18152, corrupt procedure WAL including ADDENDUM"

2017-07-10 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 5d710bbe7 -> 844596e09
  refs/heads/branch-1.4 3903358ee -> 2256aedd3


Revert "HBASE-18216 [AMv2] Workaround for HBASE-18152, corrupt procedure WAL 
including ADDENDUM"

This reverts commit 8ce3f49f8034417734ef1a3e6c624a601a22ace9.
See HBASE-18340


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2256aedd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2256aedd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2256aedd

Branch: refs/heads/branch-1.4
Commit: 2256aedd3bf84aa9bdaee8d244da61e50ff919d9
Parents: 3903358
Author: Andrew Purtell 
Authored: Mon Jul 10 19:06:03 2017 -0700
Committer: Andrew Purtell 
Committed: Mon Jul 10 19:25:00 2017 -0700

--
 .../store/wal/ProcedureWALFormatReader.java | 93 +---
 1 file changed, 24 insertions(+), 69 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2256aedd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 8349611..281292d 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -43,7 +43,7 @@ public class ProcedureWALFormatReader {
   private static final Log LOG = 
LogFactory.getLog(ProcedureWALFormatReader.class);
 
   // 
==
-  //  We read the WALs in reverse order from the newest to the oldest.
+  //  We read the WALs in reverse order. from the newest to the oldest.
   //  We have different entry types:
   //   - INIT: Procedure submitted by the user (also known as 'root procedure')
   //   - INSERT: Children added to the procedure :[, ...]
@@ -52,8 +52,7 @@ public class ProcedureWALFormatReader {
   //
   // In the WAL we can find multiple times the same procedure as UPDATE or 
INSERT.
   // We read the WAL from top to bottom, so every time we find an entry of the
-  // same procedure, that will be the "latest" update (Caveat: with multiple 
threads writing
-  // the store, this assumption does not hold).
+  // same procedure, that will be the "latest" update.
   //
   // We keep two in-memory maps:
   //  - localProcedureMap: is the map containing the entries in the WAL we are 
processing
@@ -65,7 +64,7 @@ public class ProcedureWALFormatReader {
   //
   // The WAL is append-only so the last procedure in the WAL is the one that
   // was in execution at the time we crashed/closed the server.
-  // Given that, the procedure replay order can be inferred by the WAL order.
+  // given that, the procedure replay order can be inferred by the WAL order.
   //
   // Example:
   //WAL-2: [A, B, A, C, D]
@@ -78,7 +77,7 @@ public class ProcedureWALFormatReader {
   //WAL-2 localProcedureMap.replayOrder is [D, C, A, B]
   //WAL-1 localProcedureMap.replayOrder is [F, G]
   //
-  // Each time we reach the WAL-EOF, the "replayOrder" list is merged/appended 
in 'procedureMap'
+  // each time we reach the WAL-EOF, the "replayOrder" list is merged/appended 
in 'procedureMap'
   // so using the example above we end up with: [D, C, A, B] + [F, G] as 
replay order.
   //
   //  Fast Start: INIT/INSERT record and StackIDs
@@ -154,12 +153,12 @@ public class ProcedureWALFormatReader {
   log.setProcIds(localProcedureMap.getMinProcId(), 
localProcedureMap.getMaxProcId());
   procedureMap.mergeTail(localProcedureMap);
   //if (hasFastStartSupport) {
-  // TODO: Some procedure may be already runnables (see readInitEntry())
-  //   (we can also check the "update map" in the log trackers)
-  // --
-  //EntryIterator iter = procedureMap.fetchReady();
-  //if (iter != null) loader.load(iter);
-  // --
+// TODO: Some procedure may be already runnables (see readInitEntry())
+//   (we can also check the "update map" in the log trackers)
+// --
+//EntryIterator iter = procedureMap.fetchReady();
+//if (iter != null) loader.load(iter);
+// --
   //}
 }
   }
@@ -190,7 +189,7 @@ public class ProcedureWALFormatReader {
   }
 
   private void readInitEntry(final Procedure

hbase git commit: HBASE-18341 (addendum) Replace '.' in test names with '_' when using them as html ids since dot have special significance in CSS selectors. Without it, we weren't able to select elem

2017-07-10 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/branch-2 5d7dfa4d1 -> f2e5727e1


HBASE-18341 (addendum) Replace '.' in test names with '_' when using them as 
html ids since
dot have special significance in CSS selectors.
Without it, we weren't able to select elements and add svg (trends columns) to 
them.

Change-Id: Ia63ff0851f0a2fad0789a24edf2f0ad00060d578


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f2e5727e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f2e5727e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f2e5727e

Branch: refs/heads/branch-2
Commit: f2e5727e1b0e95f5b3daee36b7b076152f2f8d85
Parents: 5d7dfa4
Author: Apekshit Sharma 
Authored: Mon Jul 10 12:01:02 2017 -0700
Committer: Apekshit Sharma 
Committed: Mon Jul 10 12:05:28 2017 -0700

--
 dev-support/flaky-dashboard-template.html | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f2e5727e/dev-support/flaky-dashboard-template.html
--
diff --git a/dev-support/flaky-dashboard-template.html 
b/dev-support/flaky-dashboard-template.html
index 49ab9b0..f37c7d5 100644
--- a/dev-support/flaky-dashboard-template.html
+++ b/dev-support/flaky-dashboard-template.html
@@ -156,7 +156,8 @@ Legend : green: success, red: failed, yellow: timeout, 
blue: hanging
 
 {{ failed|length }} / {{ timeout|length }} / {{ hanging|length }}
 
-{% set sparkline_id = "sparkline_" ~ test ~ "_" ~ url_counter  %}
+{# Replace '.' in test names with '_' because dots are part of css 
selectors.  #}
+{% set sparkline_id = "sparkline_" ~ test|replace(".","_") ~ "_" ~ 
url_counter  %}
 
 
 

hbase git commit: HBASE-18341 (addendum) Replace '.' in test names with '_' when using them as html ids since dot have special significance in CSS selectors. Without it, we weren't able to select elem

2017-07-10 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/master 7d007eac9 -> 7d08c7810


HBASE-18341 (addendum) Replace '.' in test names with '_' when using them as 
html ids since
dot have special significance in CSS selectors.
Without it, we weren't able to select elements and add svg (trends columns) to 
them.

Change-Id: Ia63ff0851f0a2fad0789a24edf2f0ad00060d578


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7d08c781
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7d08c781
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7d08c781

Branch: refs/heads/master
Commit: 7d08c78103792e599b0676c318da1aca54501289
Parents: 7d007ea
Author: Apekshit Sharma 
Authored: Mon Jul 10 12:01:02 2017 -0700
Committer: Apekshit Sharma 
Committed: Mon Jul 10 12:04:05 2017 -0700

--
 dev-support/flaky-dashboard-template.html | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7d08c781/dev-support/flaky-dashboard-template.html
--
diff --git a/dev-support/flaky-dashboard-template.html 
b/dev-support/flaky-dashboard-template.html
index 49ab9b0..f37c7d5 100644
--- a/dev-support/flaky-dashboard-template.html
+++ b/dev-support/flaky-dashboard-template.html
@@ -156,7 +156,8 @@ Legend : green: success, red: failed, yellow: timeout, 
blue: hanging
 
 {{ failed|length }} / {{ timeout|length }} / {{ hanging|length }}
 
-{% set sparkline_id = "sparkline_" ~ test ~ "_" ~ url_counter  %}
+{# Replace '.' in test names with '_' because dots are part of css 
selectors.  #}
+{% set sparkline_id = "sparkline_" ~ test|replace(".","_") ~ "_" ~ 
url_counter  %}
 
 
 

hbase git commit: HBASE-17705 Procedure execution must fail fast if procedure is not registered (Vladimir Rodionov)

2017-07-10 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 4ab66aca8 -> 5d7dfa4d1


HBASE-17705 Procedure execution must fail fast if procedure is not registered 
(Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5d7dfa4d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5d7dfa4d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5d7dfa4d

Branch: refs/heads/branch-2
Commit: 5d7dfa4d1dfc290b0eed4f37b68c4bbb9aff7a2b
Parents: 4ab66ac
Author: tedyu 
Authored: Mon Jul 10 09:11:58 2017 -0700
Committer: tedyu 
Committed: Mon Jul 10 09:11:58 2017 -0700

--
 .../hbase/client/RpcRetryingCallerImpl.java |  6 ++-
 .../hadoop/hbase/master/MasterRpcServices.java  |  4 +-
 .../TestFastFailOnProcedureNotRegistered.java   | 42 
 3 files changed, 49 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5d7dfa4d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
index c59b020..22b79cf 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
@@ -109,7 +109,11 @@ public class RpcRetryingCallerImpl implements 
RpcRetryingCaller {
   } catch (Throwable t) {
 Throwable e = t.getCause();
 ExceptionUtil.rethrowIfInterrupt(t);
-
+Throwable cause = t.getCause();
+if (cause instanceof DoNotRetryIOException) {
+  // Fail fast
+  throw (DoNotRetryIOException) cause;
+}
 // translateException throws exception when should not retry: i.e. 
when request is bad.
 interceptor.handleFailure(context, t);
 t = translateException(t);

http://git-wip-us.apache.org/repos/asf/hbase/blob/5d7dfa4d/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index c6397f3..64b7757 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -702,8 +702,8 @@ public class MasterRpcServices extends RSRpcServices
   MasterProcedureManager mpm = 
master.getMasterProcedureManagerHost().getProcedureManager(
 desc.getSignature());
   if (mpm == null) {
-throw new ServiceException("The procedure is not registered: "
-  + desc.getSignature());
+throw new ServiceException(new DoNotRetryIOException("The procedure is 
not registered: "
+  + desc.getSignature()));
   }
 
   LOG.info(master.getClientIdAuditPrefix() + " procedure request for: "

http://git-wip-us.apache.org/repos/asf/hbase/blob/5d7dfa4d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
new file mode 100644
index 000..363eba8
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util

hbase git commit: HBASE-17705 Procedure execution must fail fast if procedure is not registered (Vladimir Rodionov)

2017-07-10 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 351703455 -> 7d007eac9


HBASE-17705 Procedure execution must fail fast if procedure is not registered 
(Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7d007eac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7d007eac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7d007eac

Branch: refs/heads/master
Commit: 7d007eac98daef8d41ac1d8adf397967fa2919fd
Parents: 3517034
Author: tedyu 
Authored: Mon Jul 10 09:04:56 2017 -0700
Committer: tedyu 
Committed: Mon Jul 10 09:04:56 2017 -0700

--
 .../hbase/client/RpcRetryingCallerImpl.java |  6 ++-
 .../hadoop/hbase/master/MasterRpcServices.java  |  4 +-
 .../TestFastFailOnProcedureNotRegistered.java   | 42 
 3 files changed, 49 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7d007eac/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
index c59b020..22b79cf 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
@@ -109,7 +109,11 @@ public class RpcRetryingCallerImpl implements 
RpcRetryingCaller {
   } catch (Throwable t) {
 Throwable e = t.getCause();
 ExceptionUtil.rethrowIfInterrupt(t);
-
+Throwable cause = t.getCause();
+if (cause instanceof DoNotRetryIOException) {
+  // Fail fast
+  throw (DoNotRetryIOException) cause;
+}
 // translateException throws exception when should not retry: i.e. 
when request is bad.
 interceptor.handleFailure(context, t);
 t = translateException(t);

http://git-wip-us.apache.org/repos/asf/hbase/blob/7d007eac/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index c6397f3..64b7757 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -702,8 +702,8 @@ public class MasterRpcServices extends RSRpcServices
   MasterProcedureManager mpm = 
master.getMasterProcedureManagerHost().getProcedureManager(
 desc.getSignature());
   if (mpm == null) {
-throw new ServiceException("The procedure is not registered: "
-  + desc.getSignature());
+throw new ServiceException(new DoNotRetryIOException("The procedure is 
not registered: "
+  + desc.getSignature()));
   }
 
   LOG.info(master.getClientIdAuditPrefix() + " procedure request for: "

http://git-wip-us.apache.org/repos/asf/hbase/blob/7d007eac/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
new file mode 100644
index 000..363eba8
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map

[42/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
index 8811ade..8bde0e5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static interface FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater
+private static interface FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater
 
 
 
@@ -149,7 +149,7 @@ var activeTableTab = "activeTableTab";
 
 
 createChecksum
-org.apache.hadoop.util.DataChecksum createChecksum(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object conf)
+org.apache.hadoop.util.DataChecksum createChecksum(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object conf)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
index 9dfc4a7..dffe7ae 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static interface FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor
+private static interface FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor
 
 
 
@@ -149,7 +149,7 @@ var activeTableTab = "activeTableTab";
 
 
 isClientRunning
-boolean isClientRunning(org.apache.hadoop.hdfs.DFSClient client)
+boolean isClientRunning(org.apache.hadoop.hdfs.DFSClient client)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
index 1c95459..344abd2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static interface FanOutOneBlockAsyncDFSOutputHelper.LeaseManager
+private static interface FanOutOneBlockAsyncDFSOutputHelper.LeaseManager
 
 
 
@@ -155,7 +155,7 @@ var activeTableTab = "activeTableTab";
 
 
 begin
-void begin(org.apache.hadoop.hdfs.DFSClient client,
+void begin(org.apache.hadoop.hdfs.DFSClient client,
long inodeId)
 
 
@@ -165,7 +165,7 @@ var activeTableTab = "activeTableTab";
 
 
 end
-void end(org.apache.hadoop.hdfs.DFSClient client,
+void end(org.apache.hadoop.hdfs.DFSClient client,
  long inodeId)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
index a004806..3b67ea1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
@@ -126,7 +126,7 @@
 
 
 
-public static class FanOutOneBlockAsyncDFSOutputHelper.NameNodeException
+public static class FanOutOneBlockAsyncDFSOutputHelper.NameNodeException
 extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Exception other than RemoteException thrown when calling 
create on namenode
 
@@ -215,7 +215,

[49/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 81d6ee0..3f51398 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -286,10 +286,10 @@
  Warnings
  Errors
 
-2242
+2244
 0
 0
-14785
+14778
 
 Files
 
@@ -762,7 +762,7 @@
 org/apache/hadoop/hbase/client/AsyncAdmin.java
 0
 0
-15
+16
 
 org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
 0
@@ -2044,5030 +2044,5030 @@
 0
 1
 
-org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
+org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
 0
 0
 1
 
-org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
-0
-0
-2
-
 org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
 0
 0
-2
-
+1
+
 org/apache/hadoop/hbase/io/compress/Compression.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/io/crypto/Cipher.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/crypto/Context.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/crypto/Decryptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/crypto/Encryption.java
 0
 0
 53
-
+
 org/apache/hadoop/hbase/io/crypto/Encryptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/crypto/KeyProvider.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/crypto/aes/AES.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
 0
 0
 15
-
+
 org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/hfile/BlockCache.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/BlockType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/CacheConfig.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/io/hfile/CacheStats.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CorruptHFileException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/io/hfile/HFile.java
 0
 0
 37
-
+
 org/apache/hadoop/hbase/io/hfile/HFileBlock.java
 0
 0
 46
-
+
 org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
 0
 0
 37
-
+
 org/apache/hadoop/hbase/io/hfile/HFileContext.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
 0
 0
 52
-
+
 org/apache/hadoop/hbase/io/hfile/HFileScanner.java
 0
 0
 22
-
+
 org/apache/hadoop/hbase/io/hfile/HFileUtil.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.j

[33/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProto

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index dd3dc2c..3e73e95 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -2397,6 +2397,8 @@
 org.apache.hadoop.hbase.namespace.NamespaceStateManager
 org.apache.hadoop.hbase.namespace.NamespaceTableAndRegionInfo
 org.apache.hadoop.hbase.replication.NamespaceTableCfWALEntryFilter (implements 
org.apache.hadoop.hbase.replication.WALCellFilter, 
org.apache.hadoop.hbase.replication.WALEntryFilter)
+org.apache.hadoop.hbase.wal.NettyAsyncFSWALConfigHelper
+org.apache.hadoop.hbase.util.NettyEventLoopGroupConfig
 org.apache.hadoop.hbase.ipc.NettyRpcClientConfigHelper
 org.apache.hadoop.hbase.http.NoCacheFilter (implements 
javax.servlet.Filter)
 org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController (implements 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 64c0395..2f0802d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 008@InterfaceAudience.Private
 009public class Version {
 010  public static final String version = 
"3.0.0-SNAPSHOT";
-011  public static final String revision = 
"c48bb67123e7bd622c567393097d81665dc5fff8";
+011  public static final String revision = 
"351703455a091171a1abc90f250f52f0a7a0aaab";
 012  public static final String user = 
"jenkins";
-013  public static final String date = "Sun 
Jul  9 14:39:17 UTC 2017";
+013  public static final String date = "Mon 
Jul 10 14:39:30 UTC 2017";
 014  public static final String url = 
"git://asf920.gq1.ygridcore.net/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";
-015  public static final String srcChecksum 
= "61ae5acbf52c24d2025a705b662b6bb1";
+015  public static final String srcChecksum 
= "da1c60d65bc12f522b3d4324366e19df";
 016}
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index cdfccb8..e303773 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -832,154 +832,182 @@
 824  }
 825
 826  /**
-827   * Get a list of {@link RegionLoad} of 
all regions hosted on a region seerver for a table.
-828   * @param serverName
-829   * @param tableName
-830   * @return a list of {@link RegionLoad} 
wrapped by {@link CompletableFuture}
-831   */
-832  
CompletableFuture> getRegionLoads(ServerName 
serverName,
-833  Optional 
tableName);
-834
-835  /**
-836   * Check whether master is in 
maintenance mode
-837   * @return true if master is in 
maintenance mode, false otherwise. The return value will be
-838   * wrapped by a {@link 
CompletableFuture}
+827   * Shuts down the HBase cluster.
+828   */
+829  CompletableFuture 
shutdown();
+830
+831  /**
+832   * Shuts down the current HBase master 
only.
+833   */
+834  CompletableFuture 
stopMaster();
+835
+836  /**
+837   * Stop the designated regionserver.
+838   * @param serverName
 839   */
-840  CompletableFuture 
isMasterInMaintenanceMode();
+840  CompletableFuture 
stopRegionServer(ServerName serverName);
 841
 842  /**
-843   * Get the current compaction state of 
a table. It could be in a major compaction, a minor
-844   * compaction, both, or none.
-845   * @param tableName table to examine
-846   * @return the current compaction state 
wrapped by a {@link CompletableFuture}
-847   */
-848  
CompletableFuture getCompactionState(TableName 
tableName);
-849
-850  /**
-851   * Get the current compaction state of 
region. It could be in a major compaction, a minor
-852   * compaction, both, or none.
-853   * @param regionName region to 
examine
-854   * @return the current compaction state 
wrapped by a {@link CompletableFuture}
-855   */
-856  
CompletableFuture getCompactionStateForRegion(byte[] 
regionName);
-857
-858  /**
-859   * Get the timestamp of the last major 
compaction for the passed table.
-860   * 

-861 * The timestamp of the oldest HFile resulting from a major compaction of that table, or not -862 * present if no such HFile coul


[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051import

[25/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDes

[29/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf

[27/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDes

[39/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 832f618..2ca99ba 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":9,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":9,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 
":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":9,"i124":10,"i125":9,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":9,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":9,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 
":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":9,"i125":10,"i126":9,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HRegionServer
+public class HRegionServer
 extends HasThread
 implements RegionServerServices, LastSequenceId, 
ConfigurationObserver
 HRegionServer makes a set of HRegions available to clients. 
It checks in with
@@ -260,169 +260,173 @@ implements DEFAULT_REGION_LOCK_AWAIT_TIME_SEC 
 
 
+private NettyEventLoopGroupConfig
+eventLoopGroupConfig 
+
+
 private ThroughputController
 flushThroughputController 
 
-
+
 protected HFileSystem
 fs 
 
-
+
 protected boolean
 fsOk 
 
-
+
 protected FileSystemUtilizationChore
 fsUtilizationChore 
 
-
+
 private HealthCheckChore
 healthCheckChore
 The health check chore.
 
 
-
+
 protected HeapMemoryManager
 hMemManager 
 
-
+
 protected InfoServer
 infoServer 
 
-
+
 private static int
 INIT_PAUSE_TIME_MS 
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CountDownLatch.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CountDownLatch
 initLatch 
 
-
+
 (package private) boolean
 kil

[41/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcServer.html
index a4b0b4c..f0adbba 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/NettyRpcServer.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -214,7 +214,7 @@ extends 
 Method Summary
 
-All Methods Static Methods Instance Methods Concrete Methods 
+All Methods Instance Methods Concrete Methods 
 
 Modifier and Type
 Method and Description
@@ -265,10 +265,6 @@ extends void
 stop() 
 
-
-private static boolean
-useEpoll(org.apache.hadoop.conf.Configuration conf) 
-
 
 
 
@@ -378,22 +374,13 @@ extends 
 
 Method Detail
-
-
-
-
-
-useEpoll
-private static boolean useEpoll(org.apache.hadoop.conf.Configuration conf)
-
-
 
 
 
 
 
 start
-public void start()
+public void start()
 
 
 
@@ -402,7 +389,7 @@ extends 
 
 stop
-public void stop()
+public void stop()
 
 
 
@@ -411,7 +398,7 @@ extends 
 
 join
-public void join()
+public void join()
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true";
 title="class or interface in java.lang">InterruptedException
 
 Throws:
@@ -425,7 +412,7 @@ extends 
 
 getListenerAddress
-public http://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress getListenerAddress()
+public http://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress getListenerAddress()
 
 
 
@@ -434,7 +421,7 @@ extends 
 
 setSocketSendBufSize
-public void setSocketSendBufSize(int size)
+public void setSocketSendBufSize(int size)
 
 
 
@@ -443,7 +430,7 @@ extends 
 
 getNumOpenConnections
-public int getNumOpenConnections()
+public int getNumOpenConnections()
 Description copied from 
class: RpcServer
 The number of open RPC conections
 
@@ -460,7 +447,7 @@ extends 
 
 call
-public Pair call(org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,
+public Pair call(org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,

  
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor md,

  org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,

  CellScanner cellScanner,
@@ -479,7 +466,7 @@ extends 
 
 call
-public Pair call(org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,
+public Pair call(org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,

  
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor md,

  org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,

  CellScanner cellScanner,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index cdb6af5..f01840a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -341,9 +341,9 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracl

[31/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Mast

[45/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
index a1a29a5..b34fe88 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer
+private class RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer
 extends RawAsyncHBaseAdmin.TableProcedureBiConsumer
 
 
@@ -240,7 +240,7 @@ extends 
 
 MergeTableRegionProcedureBiConsumer
-MergeTableRegionProcedureBiConsumer(AsyncAdmin admin,
+MergeTableRegionProcedureBiConsumer(AsyncAdmin admin,
 TableName tableName)
 
 
@@ -258,7 +258,7 @@ extends 
 
 getOperationType
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
 
 Specified by:
 getOperationType in
 class RawAsyncHBaseAdmin.TableProcedureBiConsumer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
index 7c56c7d..33e9534 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer
+private class RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer
 extends RawAsyncHBaseAdmin.TableProcedureBiConsumer
 
 
@@ -240,7 +240,7 @@ extends 
 
 ModifyColumnFamilyProcedureBiConsumer
-ModifyColumnFamilyProcedureBiConsumer(AsyncAdmin admin,
+ModifyColumnFamilyProcedureBiConsumer(AsyncAdmin admin,
   TableName tableName)
 
 
@@ -258,7 +258,7 @@ extends 
 
 getOperationType
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
 
 Specified by:
 getOperationType in
 class RawAsyncHBaseAdmin.TableProcedureBiConsumer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
index bd37753..c5a98cf 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer
+private class RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer
 extends RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
 
 
@@ -240,7 +240,7 @@ extends 
 
 ModifyNamespaceProcedureBiConsumer
-ModifyNamespaceProcedureBiConsumer(AsyncAdmin admin,
+ModifyNamespaceProcedureBiConsumer(AsyncAdmin admin,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String namespaceName)
 
 
@@ -258,7 +258,7 @@ extends 
 
 getOperationType
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
 
 Spe

[26/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTab

[46/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index f2a53e4..8c06db0 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -716,56 +716,87 @@ implements 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+shutdown()
+Shuts down the HBase cluster.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 snapshot(SnapshotDescription snapshot)
 Take a snapshot and wait for the server to complete that 
snapshot asynchronously.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 split(TableName tableName)
 Split a table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 split(TableName tableName,
  byte[] splitPoint)
 Split a table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 splitRegion(byte[] regionName,
http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in 
java.util">Optional splitPoint)
 Split an individual region.
 
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+stopMaster()
+Shuts down the current HBase master only.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-exter

[48/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 4c349ef..54a5ff5 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2017 The Apache Software Foundation
 
-  File: 2242,
- Errors: 14785,
+  File: 2244,
+ Errors: 14778,
  Warnings: 0,
  Infos: 0
   
@@ -16897,7 +16897,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -17989,7 +17989,7 @@ under the License.
   0
 
 
-  136
+  129
 
   
   
@@ -19935,7 +19935,7 @@ under the License.
   0
 
 
-  15
+  16
 
   
   
@@ -21564,6 +21564,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.wal.NettyAsyncFSWALConfigHelper.java";>org/apache/hadoop/hbase/wal/NettyAsyncFSWALConfigHelper.java
+
+
+  0
+
+
+  0
+
+
+  2
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.assignment.RegionStates.java";>org/apache/hadoop/hbase/master/assignment/RegionStates.java
 
 
@@ -26459,7 +26473,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -27220,6 +27234,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.NettyEventLoopGroupConfig.java";>org/apache/hadoop/hbase/util/NettyEventLoopGroupConfig.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.metrics.Snapshot.java";>org/apache/hadoop/hbase/metrics/Snapshot.java
 
 
@@ -31429,7 +31457,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/coc.html
--
diff --git a/coc.html b/coc.html
index ca07225..db663e4 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org";>the priv
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-09
+  Last Published: 
2017-07-10
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 0fd1f51..174b518 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-09
+  Last Published: 
2017-07-10
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 2557ab6..48c5ab7 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -527,7 +527,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
   

[38/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 144958a..b626a85 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -692,20 +692,20 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
-org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
+org.apache.hadoop.hbase.regionserver.ScanType
+org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
 org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.RegionOpeningState
+org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
+org.apache.hadoop.hbase.regionserver.Region.Operation
+org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
 org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.ScanType
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
 org.apache.hadoop.hbase.regionserver.FlushType
-org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
-org.apache.hadoop.hbase.regionserver.BloomType
 org.apache.hadoop.hbase.regionserver.MemStoreCompactor.Action
-org.apache.hadoop.hbase.regionserver.Region.Operation
-org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
-org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.regionserver.RegionOpeningState
+org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
+org.apache.hadoop.hbase.regionserver.BloomType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index bab0558..7bc7bde 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -125,9 +125,9 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteCompare
 org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult
 org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
+org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteCompare
 org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
index b933b9a..8ed9d4d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Configuration")
-public class AsyncFSWAL
+public class AsyncFSWAL
 extends AbstractFSWAL
 An asynchronous implementation of FSWAL.
  
@@ -220,102 +220,106 @@ extends batchSize 
 
 
+private http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or inte

[12/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
index 6d0fff2..0c07a2f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
@@ -34,552 +34,553 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import io.netty.buffer.ByteBuf;
-030import 
io.netty.buffer.ByteBufAllocator;
-031import io.netty.channel.Channel;
-032import 
io.netty.channel.ChannelHandler.Sharable;
-033import 
io.netty.channel.ChannelHandlerContext;
-034import io.netty.channel.EventLoop;
-035import 
io.netty.channel.SimpleChannelInboundHandler;
-036import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-037import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-038import 
io.netty.handler.timeout.IdleStateEvent;
-039import 
io.netty.handler.timeout.IdleStateHandler;
-040import 
io.netty.util.concurrent.Promise;
-041import 
io.netty.util.concurrent.PromiseCombiner;
-042
-043import java.io.IOException;
-044import java.nio.ByteBuffer;
-045import java.util.ArrayDeque;
-046import java.util.Collection;
-047import java.util.Collections;
-048import java.util.Deque;
-049import java.util.IdentityHashMap;
-050import java.util.List;
-051import java.util.Set;
-052import 
java.util.concurrent.CompletableFuture;
-053import java.util.concurrent.TimeUnit;
-054import java.util.function.Supplier;
-055
-056import 
org.apache.hadoop.conf.Configuration;
-057import 
org.apache.hadoop.crypto.Encryptor;
-058import org.apache.hadoop.fs.Path;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-061import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-062import 
org.apache.hadoop.hbase.util.FSUtils;
-063import 
org.apache.hadoop.hdfs.DFSClient;
-064import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-065import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-066import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-067import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-068import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-069import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-070import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-071import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-072import 
org.apache.hadoop.util.DataChecksum;
-073
-074import 
com.google.common.annotations.VisibleForTesting;
-075
-076/**
-077 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-078 * supports writing file with only one 
block.
-079 * 

-080 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly -081 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the -082 * method. And we place it here under util package because we want to make it independent of WAL -083 * implementation thus easier to move it to HDFS project finally. -084 *

-085 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only -086 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and -087 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside -088 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)}, -089 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop}, -090 * there will be an extra context-switch. -091 *

-092 * Advantages compare to DFSOutputStream: -093 *

    -094 *
  1. The fan out mechanism. This will reduce the latency.
  2. -095 *
  3. The asynchronous WAL could also run in the same EventLoop, we could just call write and flush -096 * inside the EventLoop thread, so generally we only have one thread to do all the things.
  4. -097 *
  5. Fail-fast when connection to datanode error. The WAL implementation could open new writer -098 * ASAP.
  6. -099 *
  7. We could benefit from netty's ByteBuf management mechanism.
  8. -100 *
-101 */ -102@InterfaceAudience.Private -103public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput { -104 -105 // The MAX_PACKET_SIZE is 16MB but it include the header size and checksum size. So here we set a -10

[50/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index cdfccb8..e303773 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -832,154 +832,182 @@
 824  }
 825
 826  /**
-827   * Get a list of {@link RegionLoad} of 
all regions hosted on a region seerver for a table.
-828   * @param serverName
-829   * @param tableName
-830   * @return a list of {@link RegionLoad} 
wrapped by {@link CompletableFuture}
-831   */
-832  
CompletableFuture> getRegionLoads(ServerName 
serverName,
-833  Optional 
tableName);
-834
-835  /**
-836   * Check whether master is in 
maintenance mode
-837   * @return true if master is in 
maintenance mode, false otherwise. The return value will be
-838   * wrapped by a {@link 
CompletableFuture}
+827   * Shuts down the HBase cluster.
+828   */
+829  CompletableFuture 
shutdown();
+830
+831  /**
+832   * Shuts down the current HBase master 
only.
+833   */
+834  CompletableFuture 
stopMaster();
+835
+836  /**
+837   * Stop the designated regionserver.
+838   * @param serverName
 839   */
-840  CompletableFuture 
isMasterInMaintenanceMode();
+840  CompletableFuture 
stopRegionServer(ServerName serverName);
 841
 842  /**
-843   * Get the current compaction state of 
a table. It could be in a major compaction, a minor
-844   * compaction, both, or none.
-845   * @param tableName table to examine
-846   * @return the current compaction state 
wrapped by a {@link CompletableFuture}
-847   */
-848  
CompletableFuture getCompactionState(TableName 
tableName);
-849
-850  /**
-851   * Get the current compaction state of 
region. It could be in a major compaction, a minor
-852   * compaction, both, or none.
-853   * @param regionName region to 
examine
-854   * @return the current compaction state 
wrapped by a {@link CompletableFuture}
-855   */
-856  
CompletableFuture getCompactionStateForRegion(byte[] 
regionName);
-857
-858  /**
-859   * Get the timestamp of the last major 
compaction for the passed table.
-860   * 

-861 * The timestamp of the oldest HFile resulting from a major compaction of that table, or not -862 * present if no such HFile could be found. -863 * @param tableName table to examine -864 * @return the last major compaction timestamp wrapped by a {@link CompletableFuture} -865 */ -866 CompletableFuture> getLastMajorCompactionTimestamp(TableName tableName); -867 -868 /** -869 * Get the timestamp of the last major compaction for the passed region. -870 *

-871 * The timestamp of the oldest HFile resulting from a major compaction of that region, or not -872 * present if no such HFile could be found. -873 * @param regionName region to examine -874 * @return the last major compaction timestamp wrapped by a {@link CompletableFuture} +843 * Update the configuration and trigger an online config change on the regionserver. +844 * @param serverName : The server whose config needs to be updated. +845 */ +846 CompletableFuture updateConfiguration(ServerName serverName); +847 +848 /** +849 * Update the configuration and trigger an online config change on all the masters and +850 * regionservers. +851 */ +852 CompletableFuture updateConfiguration(); +853 +854 /** +855 * Get a list of {@link RegionLoad} of all regions hosted on a region seerver for a table. +856 * @param serverName +857 * @param tableName +858 * @return a list of {@link RegionLoad} wrapped by {@link CompletableFuture} +859 */ +860 CompletableFuture> getRegionLoads(ServerName serverName, +861 Optional tableName); +862 +863 /** +864 * Check whether master is in maintenance mode +865 * @return true if master is in maintenance mode, false otherwise. The return value will be +866 * wrapped by a {@link CompletableFuture} +867 */ +868 CompletableFuture isMasterInMaintenanceMode(); +869 +870 /** +871 * Get the current compaction state of a table. It could be in a major compaction, a minor +872 * compaction, both, or none. +873 * @param tableName table to examine +874 * @return the current compaction state wrapped by a {@link CompletableFuture} 875 */ -876 CompletableFuture> getLastMajorCompactionTimestampForRegion(byte[] regionName); +876 CompletableFuture getCompactionState(TableName tableName); 877 878 /** -879 * Turn the load balancer on or off. -880 * @param on -881 * @return Previous balancer value wrapped by a {@link Co


hbase-site git commit: INFRA-10751 Empty commit

2017-07-10 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 0821e51a0 -> 5a4910e7b


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/5a4910e7
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/5a4910e7
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/5a4910e7

Branch: refs/heads/asf-site
Commit: 5a4910e7b4bfcf6e07e75955ebb3b35b0ef98889
Parents: 0821e51
Author: jenkins 
Authored: Mon Jul 10 15:02:19 2017 +
Committer: jenkins 
Committed: Mon Jul 10 15:02:19 2017 +

--

--




[36/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
index f0f6f89..daa78ec 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -973,101 +973,107 @@
 
 
 
+NettyEventLoopGroupConfig
+
+Event loop group related config.
+
+
+
 NonceKey
 
 This implementation is not smart and just treats nonce 
group and nonce as random bits.
 
 
-
+
 ObjectIntPair
 
 A generic class for pair of an Object and and a primitive 
int value.
 
 
-
+
 ObjectPool
 
 A thread-safe shared object pool in which object creation 
is expected to be lightweight, and the
  objects may be excessively created and discarded.
 
 
-
+
 OrderedBytes
 
 Utility class that handles ordered byte arrays.
 
 
-
+
 Pair
 
 A generic class for pairs.
 
 
-
+
 PairOfSameType
 
 A generic, immutable class for pairs of objects both of 
type T.
 
 
-
+
 PoolMap
 
 The PoolMap maps a key to a collection of 
values, the elements
  of which are managed by a pool.
 
 
-
+
 PoolMap.ThreadLocalPool
 
 The ThreadLocalPool represents a PoolMap.Pool 
that
  builds on the http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadLocal.html?is-external=true";
 title="class or interface in java.lang">ThreadLocal 
class.
 
 
-
+
 PrettyPrinter
  
 
-
+
 ProtoUtil
  
 
-
+
 ReflectionUtils
  
 
-
+
 RegionMover
 
 Tool for loading/unloading regions to/from given 
regionserver This tool can be run from Command
  line directly as a utility.
 
 
-
+
 RegionMover.MoveWithoutAck
 
 Move Regions without Acknowledging.Usefule in case of RS 
shutdown as we might want to shut the
  RS down anyways and not abort on a stuck region.
 
 
-
+
 RegionMover.RegionMoverBuilder
 
 Builder for Region mover.
 
 
-
+
 RegionSizeCalculator
 
 Computes size of each region for given table and given 
column families.
 
 
-
+
 RegionSplitCalculator
 
 This is a generic region split calculator.
 
 
-
+
 RegionSplitter
 
 The RegionSplitter 
class provides several utilities to help in the
@@ -1075,208 +1081,208 @@
  instead of having HBase handle that automatically.
 
 
-
+
 RegionSplitter.HexStringSplit
 
 HexStringSplit is a well-known RegionSplitter.SplitAlgorithm 
for choosing region
  boundaries.
 
 
-
+
 RegionSplitter.UniformSplit
 
 A SplitAlgorithm that divides the space of possible keys 
evenly.
 
 
-
+
 RetryCounter
  
 
-
+
 RetryCounter.BackoffPolicy
 
 Policy for calculating sleeping intervals between retry 
attempts
 
 
-
+
 RetryCounter.ExponentialBackoffPolicy
  
 
-
+
 RetryCounter.ExponentialBackoffPolicyWithLimit
  
 
-
+
 RetryCounter.RetryConfig
 
 Configuration for a retry counter
 
 
-
+
 RetryCounterFactory
  
 
-
+
 RollingStatCalculator
 
 This class maintains mean and variation for any sequence of 
input provided to it.
 
 
-
+
 RowBloomContext
 
 Handles ROW bloom related context.
 
 
-
+
 RowBloomHashKey
  
 
-
+
 RowColBloomContext
 
 Handles ROWCOL bloom related context.
 
 
-
+
 RowColBloomHashKey
 
 An hash key for ROWCOL bloom.
 
 
-
+
 ServerCommandLine
 
 Base class for command lines that start up various HBase 
daemons.
 
 
-
+
 ServerRegionReplicaUtil
 
 Similar to RegionReplicaUtil but for the 
server side
 
 
-
+
 ShutdownHookManager
 
 This class provides ShutdownHookManager shims for HBase to 
interact with the Hadoop 1.0.x and the
  Hadoop 2.0+ series.
 
 
-
+
 ShutdownHookManager.ShutdownHookManagerV1
  
 
-
+
 ShutdownHookManager.ShutdownHookManagerV2
  
 
-
+
 SimpleByteRange
 
 A read only version of the ByteRange.
 
 
-
+
 SimpleMutableByteRange
 
 A basic mutable ByteRange 
implementation.
 
 
-
+
 SimplePositionedByteRange
 
 Extends the basic SimpleMutableByteRange 
implementation with position
  support and it is a readonly version.
 
 
-
+
 SimplePositionedMutableByteRange
 
 Extends the basic AbstractPositionedByteRange 
implementation with
  position support and it is a mutable version.
 
 
-
+
 Sleeper
 
 Sleeper for current thread.
 
 
-
+
 SoftObjectPool
 
 A SoftReference based shared object pool.
 
 
-
+
 SortedList
 
 Simple sorted list implementation that uses http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayList as
  the underlying collection so we can support RandomAccess.
 
 
-
+
 StealJobQueue
 
 This queue allows a ThreadPoolExecutor to steal jobs from 
another ThreadPoolExecutor.
 
 
-
+
 Strings
 
 Utility for Strings.
 
 
-
+
 Threads
 
 Thread Utility
 
 
-
+
 Threads.PrintThreadInfoLazyHolder
  
 
-
+
 Triple
 
 Utility class to manage a triple.
 
 
-
+
 UnsafeAccess
  
 
-
+
 UnsafeAvailChecker
  
 
-
+
 VersionInfo
 

[22/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf

[16/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.G

[30/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDes

[28/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Mast

[23/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
index 6d0fff2..0c07a2f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
@@ -34,552 +34,553 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import io.netty.buffer.ByteBuf;
-030import 
io.netty.buffer.ByteBufAllocator;
-031import io.netty.channel.Channel;
-032import 
io.netty.channel.ChannelHandler.Sharable;
-033import 
io.netty.channel.ChannelHandlerContext;
-034import io.netty.channel.EventLoop;
-035import 
io.netty.channel.SimpleChannelInboundHandler;
-036import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-037import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-038import 
io.netty.handler.timeout.IdleStateEvent;
-039import 
io.netty.handler.timeout.IdleStateHandler;
-040import 
io.netty.util.concurrent.Promise;
-041import 
io.netty.util.concurrent.PromiseCombiner;
-042
-043import java.io.IOException;
-044import java.nio.ByteBuffer;
-045import java.util.ArrayDeque;
-046import java.util.Collection;
-047import java.util.Collections;
-048import java.util.Deque;
-049import java.util.IdentityHashMap;
-050import java.util.List;
-051import java.util.Set;
-052import 
java.util.concurrent.CompletableFuture;
-053import java.util.concurrent.TimeUnit;
-054import java.util.function.Supplier;
-055
-056import 
org.apache.hadoop.conf.Configuration;
-057import 
org.apache.hadoop.crypto.Encryptor;
-058import org.apache.hadoop.fs.Path;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-061import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-062import 
org.apache.hadoop.hbase.util.FSUtils;
-063import 
org.apache.hadoop.hdfs.DFSClient;
-064import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-065import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-066import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-067import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-068import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-069import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-070import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-071import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-072import 
org.apache.hadoop.util.DataChecksum;
-073
-074import 
com.google.common.annotations.VisibleForTesting;
-075
-076/**
-077 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-078 * supports writing file with only one 
block.
-079 * 

-080 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly -081 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the -082 * method. And we place it here under util package because we want to make it independent of WAL -083 * implementation thus easier to move it to HDFS project finally. -084 *

-085 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only -086 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and -087 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside -088 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)}, -089 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop}, -090 * there will be an extra context-switch. -091 *

-092 * Advantages compare to DFSOutputStream: -093 *

    -094 *
  1. The fan out mechanism. This will reduce the latency.
  2. -095 *
  3. The asynchronous WAL could also run in the same EventLoop, we could just call write and flush -096 * inside the EventLoop thread, so generally we only have one thread to do all the things.
  4. -097 *
  5. Fail-fast when connection to datanode error. The WAL implementation could open new writer -098 * ASAP.
  6. -099 *
  7. We could benefit from netty's ByteBuf management mechanism.
  8. -100 *
-101 */ -102@InterfaceAudience.Private -103public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput { -104 -105 // The MAX_PACKET_SIZE is 16MB but it include the header size and checksum size. So here we set a -106 // smaller limit for data size. -107 priv

[43/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index f51caa5..9b3d915 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -531,25 +531,25 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.client.TableState.State
-org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
-org.apache.hadoop.hbase.client.IsolationLevel
-org.apache.hadoop.hbase.client.CompactionState
-org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
-org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
-org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
-org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
-org.apache.hadoop.hbase.client.Scan.ReadType
-org.apache.hadoop.hbase.client.RequestController.ReturnCode
-org.apache.hadoop.hbase.client.SnapshotType
+org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
 org.apache.hadoop.hbase.client.HBaseAdmin.ReplicationState
 org.apache.hadoop.hbase.client.RegionLocateType
-org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
+org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
+org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
+org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
+org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
+org.apache.hadoop.hbase.client.CompactType
 org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
+org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.SnapshotType
 org.apache.hadoop.hbase.client.Durability
-org.apache.hadoop.hbase.client.CompactType
+org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.RequestController.ReturnCode
+org.apache.hadoop.hbase.client.Scan.ReadType
+org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
+org.apache.hadoop.hbase.client.TableState.State
 org.apache.hadoop.hbase.client.MasterSwitchType
-org.apache.hadoop.hbase.client.Consistency
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
index 5bdab95..bbf68db 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
@@ -104,8 +104,8 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.executor.ExecutorType
 org.apache.hadoop.hbase.executor.EventType
+org.apache.hadoop.hbase.executor.ExecutorType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 85f8501..2835bdc 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -175,14 +175,14 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.filter.BitComparator.Bitwis

[07/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051import 
io.

[19/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import 
org.apache.hadoop.hbase.sh

[14/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
index bca52d7..9eea520 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
@@ -28,70 +28,69 @@
 020import java.io.Closeable;
 021import java.io.IOException;
 022import java.nio.ByteBuffer;
-023import 
java.nio.channels.CompletionHandler;
-024import 
java.util.concurrent.CompletableFuture;
-025
-026import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-027import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-028import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-029
-030/**
-031 * Interface for asynchronous filesystem 
output stream.
-032 */
-033@InterfaceAudience.Private
-034public interface AsyncFSOutput extends 
Closeable {
-035
-036  /**
-037   * Just call write(b, 0, b.length).
-038   * @see #write(byte[], int, int)
-039   */
-040  void write(byte[] b);
-041
-042  /**
-043   * Copy the data into the buffer. Note 
that you need to call {@link #flush(boolean)} to flush the
-044   * buffer manually.
-045   */
-046  void write(byte[] b, int off, int 
len);
-047
-048  /**
-049   * Write an int to the buffer.
-050   */
-051  void writeInt(int i);
-052
-053  /**
-054   * Copy the data in the given {@code 
bb} into the buffer.
-055   */
-056  void write(ByteBuffer bb);
-057
-058  /**
-059   * Return the current size of buffered 
data.
-060   */
-061  int buffered();
-062
-063  /**
-064   * Return current pipeline. Empty array 
if no pipeline.
-065   */
-066  DatanodeInfo[] getPipeline();
-067
-068  /**
-069   * Flush the buffer out.
-070   * @param sync persistent the data to 
device
-071   * @return A CompletableFuture that 
hold the acked length after flushing.
-072   */
-073  CompletableFuture 
flush(boolean sync);
-074
-075  /**
-076   * The close method when error 
occurred.
-077   */
-078  void 
recoverAndClose(CancelableProgressable reporter) throws IOException;
-079
-080  /**
-081   * Close the file. You should call 
{@link #recoverAndClose(CancelableProgressable)} if this method
-082   * throws an exception.
-083   */
-084  @Override
-085  void close() throws IOException;
-086}
+023import 
java.util.concurrent.CompletableFuture;
+024
+025import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+026import 
org.apache.hadoop.hbase.util.CancelableProgressable;
+027import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+028
+029/**
+030 * Interface for asynchronous filesystem 
output stream.
+031 */
+032@InterfaceAudience.Private
+033public interface AsyncFSOutput extends 
Closeable {
+034
+035  /**
+036   * Just call write(b, 0, b.length).
+037   * @see #write(byte[], int, int)
+038   */
+039  void write(byte[] b);
+040
+041  /**
+042   * Copy the data into the buffer. Note 
that you need to call {@link #flush(boolean)} to flush the
+043   * buffer manually.
+044   */
+045  void write(byte[] b, int off, int 
len);
+046
+047  /**
+048   * Write an int to the buffer.
+049   */
+050  void writeInt(int i);
+051
+052  /**
+053   * Copy the data in the given {@code 
bb} into the buffer.
+054   */
+055  void write(ByteBuffer bb);
+056
+057  /**
+058   * Return the current size of buffered 
data.
+059   */
+060  int buffered();
+061
+062  /**
+063   * Return current pipeline. Empty array 
if no pipeline.
+064   */
+065  DatanodeInfo[] getPipeline();
+066
+067  /**
+068   * Flush the buffer out.
+069   * @param sync persistent the data to 
device
+070   * @return A CompletableFuture that 
hold the acked length after flushing.
+071   */
+072  CompletableFuture 
flush(boolean sync);
+073
+074  /**
+075   * The close method when error 
occurred.
+076   */
+077  void 
recoverAndClose(CancelableProgressable reporter) throws IOException;
+078
+079  /**
+080   * Close the file. You should call 
{@link #recoverAndClose(CancelableProgressable)} if this method
+081   * throws an exception.
+082   */
+083  @Override
+084  void close() throws IOException;
+085}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
index e6485b5..5fb4dd3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
@@ -28,153 +28,154 @@
 020import 
com.google.commo

[37/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
index 463362d..2a43abe 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class AsyncProtobufLogWriter
+public class AsyncProtobufLogWriter
 extends AbstractProtobufLogWriter
 implements AsyncFSWALProvider.AsyncWriter
 AsyncWriter for protobuf-based WAL.
@@ -169,14 +169,18 @@ implements asyncOutputWrapper 
 
 
+private http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class
+channelClass 
+
+
 private io.netty.channel.EventLoop
 eventLoop 
 
-
+
 private static 
org.apache.commons.logging.Log
 LOG 
 
-
+
 private AsyncFSOutput
 output 
 
@@ -202,7 +206,8 @@ implements Constructor and Description
 
 
-AsyncProtobufLogWriter(io.netty.channel.EventLoop eventLoop) 
+AsyncProtobufLogWriter(io.netty.channel.EventLoop eventLoop,
+  http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class channelClass) 
 
 
 
@@ -313,7 +318,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -322,7 +327,16 @@ implements 
 
 eventLoop
-private final io.netty.channel.EventLoop eventLoop
+private final io.netty.channel.EventLoop eventLoop
+
+
+
+
+
+
+
+channelClass
+private final http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class channelClass
 
 
 
@@ -331,7 +345,7 @@ implements 
 
 output
-private AsyncFSOutput output
+private AsyncFSOutput output
 
 
 
@@ -340,7 +354,7 @@ implements 
 
 asyncOutputWrapper
-private http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in java.io">OutputStream asyncOutputWrapper
+private http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in java.io">OutputStream asyncOutputWrapper
 
 
 
@@ -351,13 +365,14 @@ implements 
+
 
 
 
 
 AsyncProtobufLogWriter
-public AsyncProtobufLogWriter(io.netty.channel.EventLoop eventLoop)
+public AsyncProtobufLogWriter(io.netty.channel.EventLoop eventLoop,
+  http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class channelClass)
 
 
 
@@ -374,7 +389,7 @@ implements 
 
 append
-public void append(WAL.Entry entry)
+public void append(WAL.Entry entry)
 
 Specified by:
 append in
 interface WALProvider.AsyncWriter
@@ -387,7 +402,7 @@ implements 
 
 sync
-public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureLong> sync()
+public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureLong> sync()
 
 Specified by:
 sync in
 interface WALProvider.AsyncWriter
@@ -400,7 +415,7 @@ implements 
 
 close
-public void close()
+public void close()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -418,7 +433,7 @@ implements 
 
 getOutput
-public AsyncFSOutput getOutput()
+public AsyncFSOutput getOutput()
 
 
 
@@ -427,7 +442,7 @@ implements 
 
 initOutput
-protected void initOutput(org.apache.hadoop.fs.FileSystem fs,
+protected void initOutput(org.apache.hadoop.fs.FileSystem fs,
   org.apache.hadoop.fs.Path path,
   boolean overwritable,
   int bufferSize,
@@ -448,7 +463,7 @@ implements 
 
 write
-private long write(http://docs.oracle.com/javase/8/docs/api/java/util/function/Consumer.html?is-external=true";
 title="class or interface in java.util.function">Consumer

[51/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/0821e51a
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/0821e51a
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/0821e51a

Branch: refs/heads/asf-site
Commit: 0821e51a0b0f41c38d69dce76ef06d6b0d0d70a8
Parents: f2b6c72
Author: jenkins 
Authored: Mon Jul 10 15:01:59 2017 +
Committer: jenkins 
Committed: Mon Jul 10 15:01:59 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/index-all.html  |21 +
 .../hadoop/hbase/class-use/ServerName.html  |12 +
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   150 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   272 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 17520 -
 checkstyle.rss  |42 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 2 +
 devapidocs/allclasses-noframe.html  | 2 +
 devapidocs/constant-values.html |32 +-
 devapidocs/index-all.html   |   105 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/class-use/ServerName.html  |34 +-
 .../class-use/InterfaceAudience.Private.html|60 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   150 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.html|   163 +-
 ...dmin.AddColumnFamilyProcedureBiConsumer.html | 6 +-
 .../client/RawAsyncHBaseAdmin.AdminRpcCall.html | 4 +-
 .../client/RawAsyncHBaseAdmin.Converter.html| 4 +-
 ...dmin.CreateNamespaceProcedureBiConsumer.html | 6 +-
 ...aseAdmin.CreateTableProcedureBiConsumer.html | 6 +-
 ...n.DeleteColumnFamilyProcedureBiConsumer.html | 6 +-
 ...dmin.DeleteNamespaceProcedureBiConsumer.html | 6 +-
 ...aseAdmin.DeleteTableProcedureBiConsumer.html | 8 +-
 ...seAdmin.DisableTableProcedureBiConsumer.html | 6 +-
 ...aseAdmin.EnableTableProcedureBiConsumer.html | 6 +-
 .../RawAsyncHBaseAdmin.MasterRpcCall.html   | 4 +-
 ...min.MergeTableRegionProcedureBiConsumer.html | 6 +-
 ...n.ModifyColumnFamilyProcedureBiConsumer.html | 6 +-
 ...dmin.ModifyNamespaceProcedureBiConsumer.html | 6 +-
 ...HBaseAdmin.NamespaceProcedureBiConsumer.html |14 +-
 .../RawAsyncHBaseAdmin.ProcedureBiConsumer.html |12 +-
 .../RawAsyncHBaseAdmin.TableOperator.html   | 4 +-
 ...syncHBaseAdmin.TableProcedureBiConsumer.html |14 +-
 ...eAdmin.TruncateTableProcedureBiConsumer.html | 6 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.html |   401 +-
 .../hadoop/hbase/client/package-tree.html   |28 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   |10 +-
 .../hadoop/hbase/io/asyncfs/AsyncFSOutput.html  |20 +-
 .../hbase/io/asyncfs/AsyncFSOutputHelper.html   |16 +-
 ...FanOutOneBlockAsyncDFSOutput.AckHandler.html |14 +-
 .../FanOutOneBlockAsyncDFSOutput.Callback.html  |10 +-
 .../FanOutOneBlockAsyncDFSOutput.State.html |14 +-
 .../asyncfs/FanOutOneBlockAsyncDFSOutput.html   |88 +-
 ...OneBlockAsyncDFSOutputHelper.BlockAdder.html | 4 +-
 ...BlockAsyncDFSOutputHelper.CancelOnClose.html | 8 +-
 ...ockAsyncDFSOutputHelper.ChecksumCreater.html | 4 +-
 ...ckAsyncDFSOutputHelper.DFSClientAdaptor.html | 4 +-
 ...eBlockAsyncDFSOutputHelper.LeaseManager.html | 6 +-
 ...kAsyncDFSOutputHelper.NameNodeException.html | 6 +-
 ...utOneBlockAsyncDFSOutputHelper.PBHelper.html | 6 +-
 ...DFSOutputHelper.PipelineAckStatusGetter.html | 4 +-
 ...kAsyncDFSOutputHelper.StorageTypeSetter.html | 4 +-
 .../FanOutOneBlockAsyncDFSOutputHelper.html |   108 +-
 .../io/asyncfs/class-use/AsyncFSOutput.html | 5 +-
 .../class-use/FanOutOneBlockAsyncDFSOutput.html |10 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 4 +-
 .../apache/hadoop/hbase/ipc/NettyRpcServer.html |35 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../org/apache/hadoop/hbase/master/HMaster.html | 2 +-
 .../master/HMasterCommandLine.LocalHMaster.html | 2 +-
 .../hbase/master/balancer/package-tree.h

[44/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 19f2a02..477054a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
 
109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
 
109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RawAsyncHBaseAdmin
+public class RawAsyncHBaseAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
@@ -985,70 +985,101 @@ implements 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+shutdown()
+Shuts down the HBase cluster.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 snapshot(SnapshotDescription snapshotDesc)
 Take a snapshot and wait for the server to complete that 
snapshot asynchronously.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 split(ServerName sn,
  HRegionInfo hri,
  http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in 
java.util">Optional splitPoint) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture

[21/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Mast

[13/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
index 6d0fff2..0c07a2f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
@@ -34,552 +34,553 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import io.netty.buffer.ByteBuf;
-030import 
io.netty.buffer.ByteBufAllocator;
-031import io.netty.channel.Channel;
-032import 
io.netty.channel.ChannelHandler.Sharable;
-033import 
io.netty.channel.ChannelHandlerContext;
-034import io.netty.channel.EventLoop;
-035import 
io.netty.channel.SimpleChannelInboundHandler;
-036import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-037import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-038import 
io.netty.handler.timeout.IdleStateEvent;
-039import 
io.netty.handler.timeout.IdleStateHandler;
-040import 
io.netty.util.concurrent.Promise;
-041import 
io.netty.util.concurrent.PromiseCombiner;
-042
-043import java.io.IOException;
-044import java.nio.ByteBuffer;
-045import java.util.ArrayDeque;
-046import java.util.Collection;
-047import java.util.Collections;
-048import java.util.Deque;
-049import java.util.IdentityHashMap;
-050import java.util.List;
-051import java.util.Set;
-052import 
java.util.concurrent.CompletableFuture;
-053import java.util.concurrent.TimeUnit;
-054import java.util.function.Supplier;
-055
-056import 
org.apache.hadoop.conf.Configuration;
-057import 
org.apache.hadoop.crypto.Encryptor;
-058import org.apache.hadoop.fs.Path;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-061import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-062import 
org.apache.hadoop.hbase.util.FSUtils;
-063import 
org.apache.hadoop.hdfs.DFSClient;
-064import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-065import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-066import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-067import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-068import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-069import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-070import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-071import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-072import 
org.apache.hadoop.util.DataChecksum;
-073
-074import 
com.google.common.annotations.VisibleForTesting;
-075
-076/**
-077 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-078 * supports writing file with only one 
block.
-079 * 

-080 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly -081 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the -082 * method. And we place it here under util package because we want to make it independent of WAL -083 * implementation thus easier to move it to HDFS project finally. -084 *

-085 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only -086 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and -087 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside -088 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)}, -089 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop}, -090 * there will be an extra context-switch. -091 *

-092 * Advantages compare to DFSOutputStream: -093 *

    -094 *
  1. The fan out mechanism. This will reduce the latency.
  2. -095 *
  3. The asynchronous WAL could also run in the same EventLoop, we could just call write and flush -096 * inside the EventLoop thread, so generally we only have one thread to do all the things.
  4. -097 *
  5. Fail-fast when connection to datanode error. The WAL implementation could open new writer -098 * ASAP.
  6. -099 *
  7. We could benefit from netty's ByteBuf management mechanism.
  8. -100 *
-101 */ -102@InterfaceAudience.Private -103public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput { -104 -105 // The MAX_PACKET_SIZE is 16MB but it include the header size and checksum size. So here we

[47/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index e36f006..1ebd57d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":18,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":18,"i44":6,"i45":6,"i46":6,"i47":18,"i48":6,"i49":18,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":18,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":18,"i69":6,"i70":18,"i71":6,"i72":18,"i73":6,"i74":18,"i75":6,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":18,"i102":18,"i103":6,"i104":6,"i105":18,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":18,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":18,"i44":6,"i45":6,"i46":6,"i47":18,"i48":6,"i49":18,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":18,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":18,"i69":6,"i70":18,"i71":6,"i72":18,"i73":6,"i74":18,"i75":6,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":18,"i103":18,"i104":6,"i105":6,"i106":18,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -752,18 +752,24 @@ public interface 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+shutdown()
+Shuts down the HBase cluster.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 snapshot(SnapshotDescription snapshot)
 Take a snapshot and wait for the server to complete that 
snapshot asynchronously.
 
 
-
+
 default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
 TableName tableName)
 Take a snapshot for the given table.
 
 
-
+
 default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
 TableName tableName,
@@ -771,51 +777,76 @@ public interface Create typed snapshot of the table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 split(TableName tableName)
 Split a table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is

[11/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
index 6d0fff2..0c07a2f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
@@ -34,552 +34,553 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import io.netty.buffer.ByteBuf;
-030import 
io.netty.buffer.ByteBufAllocator;
-031import io.netty.channel.Channel;
-032import 
io.netty.channel.ChannelHandler.Sharable;
-033import 
io.netty.channel.ChannelHandlerContext;
-034import io.netty.channel.EventLoop;
-035import 
io.netty.channel.SimpleChannelInboundHandler;
-036import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-037import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-038import 
io.netty.handler.timeout.IdleStateEvent;
-039import 
io.netty.handler.timeout.IdleStateHandler;
-040import 
io.netty.util.concurrent.Promise;
-041import 
io.netty.util.concurrent.PromiseCombiner;
-042
-043import java.io.IOException;
-044import java.nio.ByteBuffer;
-045import java.util.ArrayDeque;
-046import java.util.Collection;
-047import java.util.Collections;
-048import java.util.Deque;
-049import java.util.IdentityHashMap;
-050import java.util.List;
-051import java.util.Set;
-052import 
java.util.concurrent.CompletableFuture;
-053import java.util.concurrent.TimeUnit;
-054import java.util.function.Supplier;
-055
-056import 
org.apache.hadoop.conf.Configuration;
-057import 
org.apache.hadoop.crypto.Encryptor;
-058import org.apache.hadoop.fs.Path;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-061import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-062import 
org.apache.hadoop.hbase.util.FSUtils;
-063import 
org.apache.hadoop.hdfs.DFSClient;
-064import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-065import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-066import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-067import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-068import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-069import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-070import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-071import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-072import 
org.apache.hadoop.util.DataChecksum;
-073
-074import 
com.google.common.annotations.VisibleForTesting;
-075
-076/**
-077 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-078 * supports writing file with only one 
block.
-079 * 

-080 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly -081 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the -082 * method. And we place it here under util package because we want to make it independent of WAL -083 * implementation thus easier to move it to HDFS project finally. -084 *

-085 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only -086 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and -087 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside -088 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)}, -089 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop}, -090 * there will be an extra context-switch. -091 *

-092 * Advantages compare to DFSOutputStream: -093 *

    -094 *
  1. The fan out mechanism. This will reduce the latency.
  2. -095 *
  3. The asynchronous WAL could also run in the same EventLoop, we could just call write and flush -096 * inside the EventLoop thread, so generally we only have one thread to do all the things.
  4. -097 *
  5. Fail-fast when connection to datanode error. The WAL implementation could open new writer -098 * ASAP.
  6. -099 *
  7. We could benefit from netty's ByteBuf management mechanism.
  8. -100 *
-101 */ -102@InterfaceAudience.Private -103public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput { -104 -105 // The MAX_PACKET_SIZE is 16MB but it include the header size and checksum size. So here we set a -106 // smaller l

[05/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051import 
io.netty.handler.t

[09/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051import 
io.netty.handler.timeout.Idl

[08/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051import 
io.netty.hand

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 71844ce..75db22d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-141import 
org.apache.hadoop.hbase.s

[17/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import 
o

[32/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescr

[04/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051i

[18/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Master

[24/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Master

[03/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051import 
io.netty.handler.timeout.IdleStateEven

[20/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRe

[40/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index da6519ab..325693f 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -199,11 +199,11 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.procedure2.LockInfo.LockType
-org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
-org.apache.hadoop.hbase.procedure2.RootProcedureState.State
 org.apache.hadoop.hbase.procedure2.LockInfo.ResourceType
 org.apache.hadoop.hbase.procedure2.Procedure.LockState
+org.apache.hadoop.hbase.procedure2.RootProcedureState.State
+org.apache.hadoop.hbase.procedure2.LockInfo.LockType
+org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index b04f74b..1a3ae02 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -201,13 +201,13 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
 org.apache.hadoop.hbase.quotas.QuotaScope
+org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.quotas.ThrottlingException.Type
 org.apache.hadoop.hbase.quotas.QuotaSnapshotStore.ViolationState
-org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
-org.apache.hadoop.hbase.quotas.QuotaType
-org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
+org.apache.hadoop.hbase.quotas.ThrottlingException.Type
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index f04a8eb..c4e1202 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.CompactionChecker
+private static class HRegionServer.CompactionChecker
 extends ScheduledChore
 
 
@@ -233,7 +233,7 @@ extends 
 
 instance
-private final HRegionServer instance
+private final HRegionServer instance
 
 
 
@@ -242,7 +242,7 @@ extends 
 
 majorCompactPriority
-private final int majorCompactPriority
+private final int majorCompactPriority
 
 
 
@@ -251,7 +251,7 @@ extends 
 
 DEFAULT_PRIORITY
-private static final int DEFAULT_PRIORITY
+private static final int DEFAULT_PRIORITY
 
 See Also:
 Constant
 Field Values
@@ -264,7 +264,7 @@ extends 
 
 iteration
-private long iteration
+private long iteration
 
 
 
@@ -281,7 +281,7 @@ extends 
 
 CompactionChecker
-CompactionChecker(HRegionServer h,
+CompactionChecker(HRegionServer h,
   int sleepTime,
   Stoppable stopper)
 
@@ -300,7 +300,7 @@ extends 
 
 chore
-protected void chore()
+protected void chore()
 Description copied from 
class: ScheduledChore
 The task to execute on each scheduled execution of the 
Chore
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --

[34/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index 71844ce..75db22d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
@@ -105,2564 +105,2642 @@
 097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Mast

[01/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site f2b6c7226 -> 0821e51a0


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.Protobuf

[02/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
index f729c99..0a32350 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
@@ -54,757 +54,756 @@
 046import 
io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import 
io.netty.channel.SimpleChannelInboundHandler;
-049import 
io.netty.channel.socket.nio.NioSocketChannel;
-050import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import 
io.netty.handler.timeout.IdleStateEvent;
-053import 
io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import 
io.netty.util.concurrent.FutureListener;
-056import 
io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import 
java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import 
org.apache.commons.logging.LogFactory;
-068import 
org.apache.hadoop.conf.Configuration;
-069import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import 
org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import 
org.apache.hadoop.fs.UnresolvedLinkException;
-076import 
org.apache.hadoop.fs.permission.FsPermission;
-077import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.FSUtils;
-081import 
org.apache.hadoop.hdfs.DFSClient;
-082import 
org.apache.hadoop.hdfs.DFSOutputStream;
-083import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-084import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import 
org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import 
org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import 
org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import 
org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import 
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import 
org.apache.hadoop.io.EnumSetWritable;
-108import 
org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import 
org.apache.hadoop.security.token.Token;
-112import 
org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link 
FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class 
FanOutOneBlockAsyncDFSOutputHelper {
+049import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import 
io.netty.handler.codec.protobuf.Proto

hbase git commit: HBASE-18307 Share the same EventLoopGroup for NettyRpcServer, NettyRpcClient and AsyncFSWALProvider at RS side

2017-07-10 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 4368f0905 -> 4ab66aca8


HBASE-18307 Share the same EventLoopGroup for NettyRpcServer, NettyRpcClient 
and AsyncFSWALProvider at RS side


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4ab66aca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4ab66aca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4ab66aca

Branch: refs/heads/branch-2
Commit: 4ab66aca89da137b67b0a03d14f209e31eab37dc
Parents: 4368f09
Author: zhangduo 
Authored: Mon Jul 10 16:33:37 2017 +0800
Committer: zhangduo 
Committed: Mon Jul 10 21:02:55 2017 +0800

--
 .../hadoop/hbase/io/asyncfs/AsyncFSOutput.java  |  1 -
 .../hbase/io/asyncfs/AsyncFSOutputHelper.java   |  7 +-
 .../asyncfs/FanOutOneBlockAsyncDFSOutput.java   |  9 +-
 .../FanOutOneBlockAsyncDFSOutputHelper.java | 15 ++--
 .../apache/hadoop/hbase/ipc/NettyRpcServer.java | 93 
 .../hbase/regionserver/HRegionServer.java   | 36 +---
 .../hbase/regionserver/wal/AsyncFSWAL.java  |  9 +-
 .../wal/AsyncProtobufLogWriter.java |  8 +-
 .../wal/SecureAsyncProtobufLogWriter.java   |  5 +-
 .../hbase/util/NettyEventLoopGroupConfig.java   | 82 +
 .../hadoop/hbase/wal/AsyncFSWALProvider.java| 27 --
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  | 63 +
 .../TestFanOutOneBlockAsyncDFSOutput.java   | 42 +
 .../hbase/io/asyncfs/TestLocalAsyncOutput.java  |  8 +-
 .../TestSaslFanOutOneBlockAsyncDFSOutput.java   |  9 +-
 .../hbase/regionserver/wal/TestAsyncFSWAL.java  |  9 +-
 .../regionserver/wal/TestAsyncProtobufLog.java  |  7 +-
 .../regionserver/wal/TestAsyncWALReplay.java|  7 +-
 18 files changed, 315 insertions(+), 122 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4ab66aca/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
index 7d513db..8dd927e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.io.asyncfs;
 import java.io.Closeable;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.channels.CompletionHandler;
 import java.util.concurrent.CompletableFuture;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;

http://git-wip-us.apache.org/repos/asf/hbase/blob/4ab66aca/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
index 7fe86be..57613dc 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.io.asyncfs;
 import com.google.common.base.Throwables;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import io.netty.channel.Channel;
 import io.netty.channel.EventLoop;
 
 import java.io.IOException;
@@ -54,11 +55,11 @@ public final class AsyncFSOutputHelper {
* implementation for other {@link FileSystem} which wraps around a {@link 
FSDataOutputStream}.
*/
   public static AsyncFSOutput createOutput(FileSystem fs, Path f, boolean 
overwrite,
-  boolean createParent, short replication, long blockSize, final EventLoop 
eventLoop)
-  throws IOException {
+  boolean createParent, short replication, long blockSize, EventLoop 
eventLoop,
+  Class<? extends Channel> channelClass) throws IOException {
 if (fs instanceof DistributedFileSystem) {
   return 
FanOutOneBlockAsyncDFSOutputHelper.createOutput((DistributedFileSystem) fs, f,
-overwrite, createParent, replication, blockSize, eventLoop);
+overwrite, createParent, replication, blockSize, eventLoop, 
channelClass);
 }
 final FSDataOutputStream fsOut;
 int bufferSize = 
fs.getConf().getInt(CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,

http://git-wip-us.apache.org/repos/asf/hbase/blob/4ab66aca/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/

hbase git commit: HBASE-18307 Share the same EventLoopGroup for NettyRpcServer, NettyRpcClient and AsyncFSWALProvider at RS side

2017-07-10 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 1ddcc07d6 -> 351703455


HBASE-18307 Share the same EventLoopGroup for NettyRpcServer, NettyRpcClient 
and AsyncFSWALProvider at RS side


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/35170345
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/35170345
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/35170345

Branch: refs/heads/master
Commit: 351703455a091171a1abc90f250f52f0a7a0aaab
Parents: 1ddcc07
Author: zhangduo 
Authored: Mon Jul 10 16:33:37 2017 +0800
Committer: zhangduo 
Committed: Mon Jul 10 21:00:44 2017 +0800

--
 .../hadoop/hbase/io/asyncfs/AsyncFSOutput.java  |  1 -
 .../hbase/io/asyncfs/AsyncFSOutputHelper.java   |  7 +-
 .../asyncfs/FanOutOneBlockAsyncDFSOutput.java   |  9 +-
 .../FanOutOneBlockAsyncDFSOutputHelper.java | 15 ++--
 .../apache/hadoop/hbase/ipc/NettyRpcServer.java | 93 
 .../hbase/regionserver/HRegionServer.java   | 36 +---
 .../hbase/regionserver/wal/AsyncFSWAL.java  |  9 +-
 .../wal/AsyncProtobufLogWriter.java |  8 +-
 .../wal/SecureAsyncProtobufLogWriter.java   |  5 +-
 .../hbase/util/NettyEventLoopGroupConfig.java   | 82 +
 .../hadoop/hbase/wal/AsyncFSWALProvider.java| 27 --
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  | 63 +
 .../TestFanOutOneBlockAsyncDFSOutput.java   | 42 +
 .../hbase/io/asyncfs/TestLocalAsyncOutput.java  |  8 +-
 .../TestSaslFanOutOneBlockAsyncDFSOutput.java   |  9 +-
 .../hbase/regionserver/wal/TestAsyncFSWAL.java  |  9 +-
 .../regionserver/wal/TestAsyncProtobufLog.java  |  7 +-
 .../regionserver/wal/TestAsyncWALReplay.java|  7 +-
 18 files changed, 315 insertions(+), 122 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/35170345/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
index 7d513db..8dd927e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.io.asyncfs;
 import java.io.Closeable;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.channels.CompletionHandler;
 import java.util.concurrent.CompletableFuture;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;

http://git-wip-us.apache.org/repos/asf/hbase/blob/35170345/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
index 7fe86be..57613dc 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.io.asyncfs;
 import com.google.common.base.Throwables;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import io.netty.channel.Channel;
 import io.netty.channel.EventLoop;
 
 import java.io.IOException;
@@ -54,11 +55,11 @@ public final class AsyncFSOutputHelper {
* implementation for other {@link FileSystem} which wraps around a {@link 
FSDataOutputStream}.
*/
   public static AsyncFSOutput createOutput(FileSystem fs, Path f, boolean 
overwrite,
-  boolean createParent, short replication, long blockSize, final EventLoop 
eventLoop)
-  throws IOException {
+  boolean createParent, short replication, long blockSize, EventLoop 
eventLoop,
+  Class<? extends Channel> channelClass) throws IOException {
 if (fs instanceof DistributedFileSystem) {
   return 
FanOutOneBlockAsyncDFSOutputHelper.createOutput((DistributedFileSystem) fs, f,
-overwrite, createParent, replication, blockSize, eventLoop);
+overwrite, createParent, replication, blockSize, eventLoop, 
channelClass);
 }
 final FSDataOutputStream fsOut;
 int bufferSize = 
fs.getConf().getInt(CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,

http://git-wip-us.apache.org/repos/asf/hbase/blob/35170345/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbas