hbase git commit: HBASE-18177 FanOutOneBlockAsyncDFSOutputHelper fails to compile against Hadoop 3

2017-07-11 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 45cabfb40 -> 2cde0be39


HBASE-18177 FanOutOneBlockAsyncDFSOutputHelper fails to compile against Hadoop 3

Because ClientProtocol::create has API changes between Hadoop 2/3

Signed-off-by: zhangduo 
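The selection between the two lookups in the diff below follows a probe-and-fallback pattern: look up the Hadoop 3 signature of ClientProtocol::create reflectively at class-initialization time, and fall back to the Hadoop 2 signature when it is absent. The tail of this diff is truncated in the digest, so here is a minimal, self-contained sketch of that pattern, assuming the truncated remainder wires the two factories together this way; the FileCreator/createFileCreator names mirror the patch, everything else (String.strip as the probed method) is illustrative:

    import java.lang.reflect.Method;

    public class ReflectionFallbackDemo {
      // stand-in for the patch's FileCreator shim
      interface FileCreator {
        Object create(Object target) throws Exception;
      }

      // probe for a method that only exists in the newer environment
      // (String.strip() is JDK 11+; it plays the role of the Hadoop 3 signature)
      private static FileCreator createFileCreator3() throws NoSuchMethodException {
        Method strip = String.class.getMethod("strip");
        return target -> strip.invoke(target);
      }

      // fallback for the older environment (the Hadoop 2 signature)
      private static FileCreator createFileCreator2() {
        return target -> target.toString().trim();
      }

      static FileCreator createFileCreator() {
        try {
          return createFileCreator3();
        } catch (NoSuchMethodException e) {
          return createFileCreator2(); // probe failed: use the older API shape
        }
      }

      public static void main(String[] args) throws Exception {
        System.out.println(createFileCreator().create("  hello  "));
      }
    }

Probing once in a static initializer keeps the per-call path free of reflection lookups; only the invoke itself remains.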


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2cde0be3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2cde0be3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2cde0be3

Branch: refs/heads/branch-2
Commit: 2cde0be39303c64454591ac1bdb4747aafe17bab
Parents: 45cabfb
Author: Mike Drob 
Authored: Mon Jun 26 11:29:34 2017 -0500
Committer: zhangduo 
Committed: Wed Jul 12 13:40:34 2017 +0800

--
 .../FanOutOneBlockAsyncDFSOutputHelper.java | 63 +++-
 1 file changed, 62 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2cde0be3/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
index d14d4d8..1dbe131 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
@@ -67,6 +67,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.crypto.Encryptor;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemLinkResolver;
 import org.apache.hadoop.fs.Path;
@@ -195,6 +196,32 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
 
   private static final ChecksumCreater CHECKSUM_CREATER;
 
+  // helper class for creating files.
+  private interface FileCreator {
+    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,
+        String clientName, EnumSetWritable<CreateFlag> flag,
+        boolean createParent, short replication, long blockSize,
+        CryptoProtocolVersion[] supportedVersions) throws Exception {
+      try {
+        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,
+            replication, blockSize, supportedVersions);
+      } catch (InvocationTargetException e) {
+        if (e.getCause() instanceof Exception) {
+          throw (Exception) e.getCause();
+        } else {
+          throw new RuntimeException(e.getCause());
+        }
+      }
+    };
+
+    Object createObject(ClientProtocol instance, String src, FsPermission masked,
+        String clientName, EnumSetWritable<CreateFlag> flag,
+        boolean createParent, short replication, long blockSize,
+        CryptoProtocolVersion[] supportedVersions) throws Exception;
+  }
+
+  private static final FileCreator FILE_CREATOR;
+
   private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {
     Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");
     isClientRunningMethod.setAccessible(true);
@@ -460,6 +487,39 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
     return createChecksumCreater27(Class.forName("org.apache.hadoop.hdfs.DFSClient$Conf"));
   }
 
+  private static FileCreator createFileCreator3() throws NoSuchMethodException {
+    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,
+      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,
+      CryptoProtocolVersion[].class, String.class);
+
+    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,
+        supportedVersions) -> {
+      return (HdfsFileStatus) createMethod.invoke(instance,
+          src, masked, clientName, flag, createParent, replication, blockSize, supportedVersions,
+          null);
+    };
+  }
+
+  private static FileCreator createFileCreator2() throws NoSuchMethodException {
+    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,
+      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,
+      CryptoProtocolVersion[].class);
+
+    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,
+        supportedVersions) -> {
+      return (HdfsFileStatus) createMethod.invoke(instance,
+          src, masked, clientName, flag, createParent, replication, blockSize, supportedVersions);
+    };
+  }
+
+  pri

hbase git commit: HBASE-18177 FanOutOneBlockAsyncDFSOutputHelper fails to compile against Hadoop 3

2017-07-11 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 22dce22e0 -> cb5299ae9


HBASE-18177 FanOutOneBlockAsyncDFSOutputHelper fails to compile against Hadoop 3

Because ClientProtocol::create has API changes between Hadoop 2/3

Signed-off-by: zhangduo 
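Note how the FileCreator default method in this patch unwraps InvocationTargetException: reflection wraps whatever the target throws, so rethrowing the cause preserves the exception type the caller expects. A minimal, self-contained illustration of that unwrapping (names here are illustrative, not from the patch):

    import java.io.IOException;
    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;

    public class UnwrapDemo {
      static void mayFail() throws IOException {
        throw new IOException("disk full");
      }

      public static void main(String[] args) throws Exception {
        Method m = UnwrapDemo.class.getDeclaredMethod("mayFail");
        try {
          m.invoke(null);
        } catch (InvocationTargetException e) {
          // reflection hands back a wrapper; the original IOException is the cause
          System.out.println("unwrapped: " + e.getCause());
        }
      }
    }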


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cb5299ae
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cb5299ae
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cb5299ae

Branch: refs/heads/master
Commit: cb5299ae9b3360a6cca93958a74417d663135a60
Parents: 22dce22
Author: Mike Drob 
Authored: Mon Jun 26 11:29:34 2017 -0500
Committer: zhangduo 
Committed: Wed Jul 12 13:40:05 2017 +0800

--
 .../FanOutOneBlockAsyncDFSOutputHelper.java | 63 +++-
 1 file changed, 62 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cb5299ae/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
index d14d4d8..1dbe131 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
@@ -67,6 +67,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.crypto.Encryptor;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemLinkResolver;
 import org.apache.hadoop.fs.Path;
@@ -195,6 +196,32 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
 
   private static final ChecksumCreater CHECKSUM_CREATER;
 
+  // helper class for creating files.
+  private interface FileCreator {
+    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,
+        String clientName, EnumSetWritable<CreateFlag> flag,
+        boolean createParent, short replication, long blockSize,
+        CryptoProtocolVersion[] supportedVersions) throws Exception {
+      try {
+        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,
+            replication, blockSize, supportedVersions);
+      } catch (InvocationTargetException e) {
+        if (e.getCause() instanceof Exception) {
+          throw (Exception) e.getCause();
+        } else {
+          throw new RuntimeException(e.getCause());
+        }
+      }
+    };
+
+    Object createObject(ClientProtocol instance, String src, FsPermission masked,
+        String clientName, EnumSetWritable<CreateFlag> flag,
+        boolean createParent, short replication, long blockSize,
+        CryptoProtocolVersion[] supportedVersions) throws Exception;
+  }
+
+  private static final FileCreator FILE_CREATOR;
+
   private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {
     Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");
     isClientRunningMethod.setAccessible(true);
@@ -460,6 +487,39 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
     return createChecksumCreater27(Class.forName("org.apache.hadoop.hdfs.DFSClient$Conf"));
   }
 
+  private static FileCreator createFileCreator3() throws NoSuchMethodException {
+    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,
+      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,
+      CryptoProtocolVersion[].class, String.class);
+
+    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,
+        supportedVersions) -> {
+      return (HdfsFileStatus) createMethod.invoke(instance,
+          src, masked, clientName, flag, createParent, replication, blockSize, supportedVersions,
+          null);
+    };
+  }
+
+  private static FileCreator createFileCreator2() throws NoSuchMethodException {
+    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,
+      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,
+      CryptoProtocolVersion[].class);
+
+    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,
+        supportedVersions) -> {
+      return (HdfsFileStatus) createMethod.invoke(instance,
+          src, masked, clientName, flag, createParent, replication, blockSize, supportedVersions);
+    };
+  }
+
+  private

hbase git commit: HBASE-18343 (addendum) Track the remaining unimplemented methods for async admin

2017-07-11 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2 f9f1a3a9f -> 45cabfb40


HBASE-18343 (addendum) Track the remaining unimplemented methods for async admin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45cabfb4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45cabfb4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45cabfb4

Branch: refs/heads/branch-2
Commit: 45cabfb406de8eb56593dbde5ca86e214a88bc62
Parents: f9f1a3a
Author: Guanghao Zhang 
Authored: Tue Jul 11 18:00:54 2017 +0800
Committer: Guanghao Zhang 
Committed: Wed Jul 12 09:58:53 2017 +0800

--
 .../hadoop/hbase/client/TestAsyncRegionAdminApi.java| 12 +---
 1 file changed, 5 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/45cabfb4/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index fcd3d01..2d64afc 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -27,7 +27,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
-import java.util.Random;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
@@ -66,8 +65,6 @@ import org.junit.runners.Parameterized;
 @Category({ LargeTests.class, ClientTests.class })
 public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
 
-  public static Random RANDOM = new Random(System.currentTimeMillis());
-
   @Test
   public void testCloseRegion() throws Exception {
 createTableWithDefaultConf(tableName);
@@ -358,7 +355,8 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
   public void testSplitSwitch() throws Exception {
     createTableWithDefaultConf(tableName);
     byte[][] families = { FAMILY };
-    loadData(tableName, families, 1000);
+    final int rows = 1;
+    loadData(tableName, families, rows);
 
     RawAsyncTable metaTable = ASYNC_CONN.getRawTable(META_TABLE_NAME);
     List<HRegionLocation> regionLocations =
@@ -367,12 +365,12 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
 
 initSplitMergeSwitch();
 assertTrue(admin.setSplitOn(false).get());
-admin.split(tableName).join();
+admin.split(tableName, Bytes.toBytes(rows / 2)).join();
 int count = admin.getTableRegions(tableName).get().size();
 assertTrue(originalCount == count);
 
 assertFalse(admin.setSplitOn(true).get());
-admin.split(tableName).join();
+admin.split(tableName, Bytes.toBytes(rows / 2)).join();
     while ((count = admin.getTableRegions(tableName).get().size()) == originalCount) {
       Threads.sleep(100);
     }
@@ -654,7 +652,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
 byte[] qualifier = Bytes.toBytes("val");
 for (int i = 0; i < flushes; i++) {
   for (int k = 0; k < rows; k++) {
-byte[] row = Bytes.toBytes(RANDOM.nextLong());
+byte[] row = Bytes.add(Bytes.toBytes(k), Bytes.toBytes(i));
 Put p = new Put(row);
 for (int j = 0; j < families.length; ++j) {
   p.addColumn(families[j], qualifier, row);
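The addendum replaces random row keys with deterministic ones and splits at an explicit midpoint, so the test no longer depends on where random keys happen to land. A small sketch of the key scheme, using plain JDK types rather than HBase's Bytes utility (row() here mirrors what Bytes.add(Bytes.toBytes(k), Bytes.toBytes(i)) produces):

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    public class RowKeyDemo {
      // an 8-byte key of two big-endian ints: every (row, flush) pair maps to a
      // unique key, and non-negative values sort in numeric order
      static byte[] row(int k, int i) {
        return ByteBuffer.allocate(8).putInt(k).putInt(i).array();
      }

      public static void main(String[] args) {
        System.out.println(Arrays.toString(row(1, 0))); // [0, 0, 0, 1, 0, 0, 0, 0]
        // a split point like Bytes.toBytes(rows / 2) then falls between
        // existing keys, making the split deterministic
      }
    }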



hbase git commit: HBASE-18343 (addendum) Track the remaining unimplemented methods for async admin

2017-07-11 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master c0f743e44 -> 22dce22e0


HBASE-18343 (addendum) Track the remaining unimplemented methods for async admin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/22dce22e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/22dce22e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/22dce22e

Branch: refs/heads/master
Commit: 22dce22e068a541a8436ea27712004b02f8ff84a
Parents: c0f743e
Author: Guanghao Zhang 
Authored: Tue Jul 11 18:00:54 2017 +0800
Committer: Guanghao Zhang 
Committed: Wed Jul 12 09:32:00 2017 +0800

--
 .../hadoop/hbase/client/TestAsyncRegionAdminApi.java| 12 +---
 1 file changed, 5 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/22dce22e/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index fcd3d01..2d64afc 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -27,7 +27,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
-import java.util.Random;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
@@ -66,8 +65,6 @@ import org.junit.runners.Parameterized;
 @Category({ LargeTests.class, ClientTests.class })
 public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
 
-  public static Random RANDOM = new Random(System.currentTimeMillis());
-
   @Test
   public void testCloseRegion() throws Exception {
 createTableWithDefaultConf(tableName);
@@ -358,7 +355,8 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
   public void testSplitSwitch() throws Exception {
     createTableWithDefaultConf(tableName);
     byte[][] families = { FAMILY };
-    loadData(tableName, families, 1000);
+    final int rows = 1;
+    loadData(tableName, families, rows);
 
     RawAsyncTable metaTable = ASYNC_CONN.getRawTable(META_TABLE_NAME);
     List<HRegionLocation> regionLocations =
@@ -367,12 +365,12 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
 
 initSplitMergeSwitch();
 assertTrue(admin.setSplitOn(false).get());
-admin.split(tableName).join();
+admin.split(tableName, Bytes.toBytes(rows / 2)).join();
 int count = admin.getTableRegions(tableName).get().size();
 assertTrue(originalCount == count);
 
 assertFalse(admin.setSplitOn(true).get());
-admin.split(tableName).join();
+admin.split(tableName, Bytes.toBytes(rows / 2)).join();
     while ((count = admin.getTableRegions(tableName).get().size()) == originalCount) {
       Threads.sleep(100);
     }
@@ -654,7 +652,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
 byte[] qualifier = Bytes.toBytes("val");
 for (int i = 0; i < flushes; i++) {
   for (int k = 0; k < rows; k++) {
-byte[] row = Bytes.toBytes(RANDOM.nextLong());
+byte[] row = Bytes.add(Bytes.toBytes(k), Bytes.toBytes(i));
 Put p = new Put(row);
 for (int j = 0; j < families.length; ++j) {
   p.addColumn(families[j], qualifier, row);



hbase git commit: HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for concurrent Region#flush() to finish'

2017-07-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 d11b5d1fd -> 2857b75c2


HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for 
concurrent Region#flush() to finish'


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2857b75c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2857b75c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2857b75c

Branch: refs/heads/branch-1.2
Commit: 2857b75c2750f2cc0bd29a3d100794021dec509d
Parents: d11b5d1
Author: tedyu 
Authored: Tue Jul 11 18:19:37 2017 -0700
Committer: tedyu 
Committed: Tue Jul 11 18:19:37 2017 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  | 30 +++
 .../snapshot/FlushSnapshotSubprocedure.java | 31 +++-
 2 files changed, 60 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2857b75c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index d0d457f..43bb046 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1572,6 +1572,36 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     }
   }
 
+  public void waitForFlushes() {
+    synchronized (writestate) {
+      if (this.writestate.readOnly) {
+        // we should not wait for replayed flushed if we are read only (for example in case the
+        // region is a secondary replica).
+        return;
+      }
+      if (!writestate.flushing) return;
+      long start = System.currentTimeMillis();
+      boolean interrupted = false;
+      try {
+        while (writestate.flushing) {
+          LOG.debug("waiting for cache flush to complete for region " + this);
+          try {
+            writestate.wait();
+          } catch (InterruptedException iex) {
+            // essentially ignore and propagate the interrupt back up
+            LOG.warn("Interrupted while waiting");
+            interrupted = true;
+          }
+        }
+      } finally {
+        if (interrupted) {
+          Thread.currentThread().interrupt();
+        }
+      }
+      long duration = System.currentTimeMillis() - start;
+      LOG.debug("Waited " + duration + " ms for flush to complete");
+    }
+  }
   protected ThreadPoolExecutor getStoreOpenAndCloseThreadPool(
       final String threadNamePrefix) {
     int numStores = Math.max(1, this.htableDescriptor.getFamilies().size());
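waitForFlushes() is the consumer half of a classic guarded wait: it loops on writestate.flushing under the writestate monitor and restores the interrupt flag on exit. For it to make progress, the flusher presumably flips the flag and notifies under the same monitor when a flush completes. A minimal, self-contained sketch of that pairing (the field names only echo the patch, the rest is illustrative):

    public class GuardedWaitDemo {
      private final Object lock = new Object();
      private boolean flushing = true;

      void waitForFlushes() throws InterruptedException {
        synchronized (lock) {
          while (flushing) { // loop guards against spurious wakeups
            lock.wait();
          }
        }
      }

      void finishFlush() {
        synchronized (lock) {
          flushing = false;
          lock.notifyAll(); // wake every waiter under the same monitor
        }
      }

      public static void main(String[] args) throws Exception {
        GuardedWaitDemo d = new GuardedWaitDemo();
        new Thread(d::finishFlush).start();
        d.waitForFlushes();
        System.out.println("flush finished");
      }
    }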

http://git-wip-us.apache.org/repos/asf/hbase/blob/2857b75c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index f083601..5669452 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.regionserver.snapshot;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.Callable;
 
@@ -24,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -31,6 +33,7 @@ import org.apache.hadoop.hbase.procedure.Subprocedure;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.regionserver.Region.FlushResult;
 import org.apache.hadoop.hbase.regionserver.Region.Operation;
 import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager.SnapshotSubprocedurePool;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
@@ -52,6 +55,9 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
   private 

hbase git commit: HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for concurrent Region#flush() to finish'

2017-07-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 a84de3312 -> af4003cee


HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for 
concurrent Region#flush() to finish'


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/af4003ce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/af4003ce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/af4003ce

Branch: refs/heads/branch-1.3
Commit: af4003cee633c4d5ec14d3a5159f7080785a167f
Parents: a84de33
Author: tedyu 
Authored: Tue Jul 11 18:17:06 2017 -0700
Committer: tedyu 
Committed: Tue Jul 11 18:17:06 2017 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  | 30 +++
 .../snapshot/FlushSnapshotSubprocedure.java | 31 +++-
 2 files changed, 60 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/af4003ce/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 61d532b..279b80e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1586,6 +1586,36 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     }
   }
 
+  public void waitForFlushes() {
+    synchronized (writestate) {
+      if (this.writestate.readOnly) {
+        // we should not wait for replayed flushed if we are read only (for example in case the
+        // region is a secondary replica).
+        return;
+      }
+      if (!writestate.flushing) return;
+      long start = System.currentTimeMillis();
+      boolean interrupted = false;
+      try {
+        while (writestate.flushing) {
+          LOG.debug("waiting for cache flush to complete for region " + this);
+          try {
+            writestate.wait();
+          } catch (InterruptedException iex) {
+            // essentially ignore and propagate the interrupt back up
+            LOG.warn("Interrupted while waiting");
+            interrupted = true;
+          }
+        }
+      } finally {
+        if (interrupted) {
+          Thread.currentThread().interrupt();
+        }
+      }
+      long duration = System.currentTimeMillis() - start;
+      LOG.debug("Waited " + duration + " ms for flush to complete");
+    }
+  }
   protected ThreadPoolExecutor getStoreOpenAndCloseThreadPool(
       final String threadNamePrefix) {
     int numStores = Math.max(1, this.htableDescriptor.getFamilies().size());

http://git-wip-us.apache.org/repos/asf/hbase/blob/af4003ce/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index f083601..5669452 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.regionserver.snapshot;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.Callable;
 
@@ -24,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -31,6 +33,7 @@ import org.apache.hadoop.hbase.procedure.Subprocedure;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.regionserver.Region.FlushResult;
 import org.apache.hadoop.hbase.regionserver.Region.Operation;
 import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager.SnapshotSubprocedurePool;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
@@ -52,6 +55,9 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
   private 

hbase git commit: HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for concurrent Region#flush() to finish'

2017-07-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 4ad8ae3db -> 5da7273db


HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for 
concurrent Region#flush() to finish'


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5da7273d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5da7273d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5da7273d

Branch: refs/heads/branch-1.4
Commit: 5da7273db2364d64cc60b3b2e063ee9cb891d3de
Parents: 4ad8ae3
Author: tedyu 
Authored: Tue Jul 11 18:16:20 2017 -0700
Committer: tedyu 
Committed: Tue Jul 11 18:16:20 2017 -0700

--
 .../snapshot/FlushSnapshotSubprocedure.java | 30 
 1 file changed, 25 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5da7273d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index 5b82a08..af2a496 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.regionserver.snapshot;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.Callable;
 
@@ -24,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -52,6 +54,9 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
   private final SnapshotSubprocedurePool taskManager;
   private boolean snapshotSkipFlush = false;
 
+  // the maximum number of attempts we flush
+  final static int MAX_RETRIES = 3;
+
   public FlushSnapshotSubprocedure(ProcedureMember member,
       ForeignExceptionDispatcher errorListener, long wakeFrequency, long timeout,
       List<Region> regions, SnapshotDescription snapshot,
@@ -96,11 +101,26 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
       LOG.debug("take snapshot without flush memstore first");
     } else {
       LOG.debug("Flush Snapshotting region " + region.toString() + " started...");
-      FlushResult res = region.flush(true);
-      if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
-        // CANNOT_FLUSH may mean that a flush is already on-going
-        // we need to wait for that flush to complete
-        region.waitForFlushes();
+      boolean succeeded = false;
+      long readPt = region.getReadpoint(IsolationLevel.READ_COMMITTED);
+      for (int i = 0; i < MAX_RETRIES; i++) {
+        FlushResult res = region.flush(true);
+        if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+          // CANNOT_FLUSH may mean that a flush is already on-going
+          // we need to wait for that flush to complete
+          region.waitForFlushes();
+          if (region.getMaxFlushedSeqId() >= readPt) {
+            // writes at the start of the snapshot have been persisted
+            succeeded = true;
+            break;
+          }
+        } else {
+          succeeded = true;
+          break;
+        }
+      }
+      if (!succeeded) {
+        throw new IOException("Unable to complete flush after " + MAX_RETRIES + " attempts");
       }
     }
     ((HRegion)region).addRegionToSnapshot(snapshot, monitor);
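The loop's exit condition is worth spelling out: the read point captured before the first attempt is a sequence id, and once the region's max flushed sequence id reaches it, every write visible at the start of the snapshot is on disk, even if the flush that persisted it was a concurrent one this procedure merely waited on. A stripped-down sketch of that bounded-retry shape (the suppliers are stand-ins, not HBase calls):

    import java.io.IOException;
    import java.util.function.LongSupplier;

    public class BoundedRetryDemo {
      static final int MAX_RETRIES = 3;

      // retry until everything up to 'target' is persisted, or give up
      static void flushUpTo(long target, LongSupplier maxFlushedSeqId, Runnable flush)
          throws IOException {
        for (int i = 0; i < MAX_RETRIES; i++) {
          flush.run();                          // may be a no-op if one is in flight
          if (maxFlushedSeqId.getAsLong() >= target) {
            return;                             // writes at snapshot start are durable
          }
        }
        throw new IOException("Unable to complete flush after " + MAX_RETRIES + " attempts");
      }

      public static void main(String[] args) throws IOException {
        long[] seq = {0};
        flushUpTo(2, () -> seq[0], () -> seq[0]++);
        System.out.println("flushed to " + seq[0]);
      }
    }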



hbase git commit: HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for concurrent Region#flush() to finish'

2017-07-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 8eaad6786 -> e1bf966f9


HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for 
concurrent Region#flush() to finish'


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e1bf966f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e1bf966f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e1bf966f

Branch: refs/heads/branch-1
Commit: e1bf966f93d32127de205dfc2096d8daff104507
Parents: 8eaad67
Author: tedyu 
Authored: Tue Jul 11 18:12:15 2017 -0700
Committer: tedyu 
Committed: Tue Jul 11 18:12:15 2017 -0700

--
 .../snapshot/FlushSnapshotSubprocedure.java | 30 
 1 file changed, 25 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e1bf966f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index 5b82a08..af2a496 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.regionserver.snapshot;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.Callable;
 
@@ -24,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -52,6 +54,9 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
   private final SnapshotSubprocedurePool taskManager;
   private boolean snapshotSkipFlush = false;
 
+  // the maximum number of attempts we flush
+  final static int MAX_RETRIES = 3;
+
   public FlushSnapshotSubprocedure(ProcedureMember member,
       ForeignExceptionDispatcher errorListener, long wakeFrequency, long timeout,
       List<Region> regions, SnapshotDescription snapshot,
@@ -96,11 +101,26 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
       LOG.debug("take snapshot without flush memstore first");
     } else {
       LOG.debug("Flush Snapshotting region " + region.toString() + " started...");
-      FlushResult res = region.flush(true);
-      if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
-        // CANNOT_FLUSH may mean that a flush is already on-going
-        // we need to wait for that flush to complete
-        region.waitForFlushes();
+      boolean succeeded = false;
+      long readPt = region.getReadpoint(IsolationLevel.READ_COMMITTED);
+      for (int i = 0; i < MAX_RETRIES; i++) {
+        FlushResult res = region.flush(true);
+        if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+          // CANNOT_FLUSH may mean that a flush is already on-going
+          // we need to wait for that flush to complete
+          region.waitForFlushes();
+          if (region.getMaxFlushedSeqId() >= readPt) {
+            // writes at the start of the snapshot have been persisted
+            succeeded = true;
+            break;
+          }
+        } else {
+          succeeded = true;
+          break;
+        }
+      }
+      if (!succeeded) {
+        throw new IOException("Unable to complete flush after " + MAX_RETRIES + " attempts");
       }
     }
     ((HRegion)region).addRegionToSnapshot(snapshot, monitor);



hbase git commit: HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for concurrent Region#flush() to finish'

2017-07-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 ca5d8c40b -> f9f1a3a9f


HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for 
concurrent Region#flush() to finish'


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f9f1a3a9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f9f1a3a9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f9f1a3a9

Branch: refs/heads/branch-2
Commit: f9f1a3a9fbaf3ac98a21d8406b0da3eba0dd1d7c
Parents: ca5d8c4
Author: tedyu 
Authored: Tue Jul 11 18:10:07 2017 -0700
Committer: tedyu 
Committed: Tue Jul 11 18:10:07 2017 -0700

--
 .../snapshot/FlushSnapshotSubprocedure.java | 30 
 1 file changed, 25 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f9f1a3a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index 22df895..b30d622 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.regionserver.snapshot;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.Callable;
 
@@ -24,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -52,6 +54,9 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
   private final SnapshotSubprocedurePool taskManager;
   private boolean snapshotSkipFlush = false;
 
+  // the maximum number of attempts we flush
+  final static int MAX_RETRIES = 3;
+
   public FlushSnapshotSubprocedure(ProcedureMember member,
       ForeignExceptionDispatcher errorListener, long wakeFrequency, long timeout,
       List<Region> regions, SnapshotDescription snapshot,
@@ -96,11 +101,26 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
       LOG.debug("take snapshot without flush memstore first");
     } else {
       LOG.debug("Flush Snapshotting region " + region.toString() + " started...");
-      FlushResult res = region.flush(true);
-      if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
-        // CANNOT_FLUSH may mean that a flush is already on-going
-        // we need to wait for that flush to complete
-        region.waitForFlushes();
+      boolean succeeded = false;
+      long readPt = region.getReadpoint(IsolationLevel.READ_COMMITTED);
+      for (int i = 0; i < MAX_RETRIES; i++) {
+        FlushResult res = region.flush(true);
+        if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+          // CANNOT_FLUSH may mean that a flush is already on-going
+          // we need to wait for that flush to complete
+          region.waitForFlushes();
+          if (region.getMaxFlushedSeqId() >= readPt) {
+            // writes at the start of the snapshot have been persisted
+            succeeded = true;
+            break;
+          }
+        } else {
+          succeeded = true;
+          break;
+        }
+      }
+      if (!succeeded) {
+        throw new IOException("Unable to complete flush after " + MAX_RETRIES + " attempts");
       }
     }
     ((HRegion)region).addRegionToSnapshot(snapshot, monitor);



hbase git commit: HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for concurrent Region#flush() to finish'

2017-07-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master cc4301ca0 -> c0f743e44


HBASE-18358 Backport HBASE-18099 'FlushSnapshotSubprocedure should wait for 
concurrent Region#flush() to finish'


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c0f743e4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c0f743e4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c0f743e4

Branch: refs/heads/master
Commit: c0f743e44f3e9ec8095d98321bf8558a6ec1
Parents: cc4301c
Author: tedyu 
Authored: Tue Jul 11 17:26:22 2017 -0700
Committer: tedyu 
Committed: Tue Jul 11 17:26:22 2017 -0700

--
 .../snapshot/FlushSnapshotSubprocedure.java | 30 
 1 file changed, 25 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c0f743e4/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index 22df895..b30d622 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.regionserver.snapshot;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.Callable;
 
@@ -24,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -52,6 +54,9 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
   private final SnapshotSubprocedurePool taskManager;
   private boolean snapshotSkipFlush = false;
 
+  // the maximum number of attempts we flush
+  final static int MAX_RETRIES = 3;
+
   public FlushSnapshotSubprocedure(ProcedureMember member,
       ForeignExceptionDispatcher errorListener, long wakeFrequency, long timeout,
       List<Region> regions, SnapshotDescription snapshot,
@@ -96,11 +101,26 @@ public class FlushSnapshotSubprocedure extends Subprocedure {
       LOG.debug("take snapshot without flush memstore first");
     } else {
       LOG.debug("Flush Snapshotting region " + region.toString() + " started...");
-      FlushResult res = region.flush(true);
-      if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
-        // CANNOT_FLUSH may mean that a flush is already on-going
-        // we need to wait for that flush to complete
-        region.waitForFlushes();
+      boolean succeeded = false;
+      long readPt = region.getReadpoint(IsolationLevel.READ_COMMITTED);
+      for (int i = 0; i < MAX_RETRIES; i++) {
+        FlushResult res = region.flush(true);
+        if (res.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+          // CANNOT_FLUSH may mean that a flush is already on-going
+          // we need to wait for that flush to complete
+          region.waitForFlushes();
+          if (region.getMaxFlushedSeqId() >= readPt) {
+            // writes at the start of the snapshot have been persisted
+            succeeded = true;
+            break;
+          }
+        } else {
+          succeeded = true;
+          break;
+        }
+      }
+      if (!succeeded) {
+        throw new IOException("Unable to complete flush after " + MAX_RETRIES + " attempts");
       }
     }
     ((HRegion)region).addRegionToSnapshot(snapshot, monitor);



hbase git commit: HBASE-18268 Eliminate the findbugs warnings for hbase-client

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 56659f342 -> d11b5d1fd


HBASE-18268 Eliminate the findbugs warnings for hbase-client


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d11b5d1f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d11b5d1f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d11b5d1f

Branch: refs/heads/branch-1.2
Commit: d11b5d1fd54a71cbd58ab616ffe73c106fa0b9f2
Parents: 56659f3
Author: Chia-Ping Tsai 
Authored: Wed Jul 12 03:17:40 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 03:17:40 2017 +0800

--
 .../java/org/apache/hadoop/hbase/HColumnDescriptor.java   | 10 ++
 .../java/org/apache/hadoop/hbase/HTableDescriptor.java|  8 
 .../java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java |  2 ++
 .../java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java   |  2 ++
 .../java/org/apache/hadoop/hbase/security/SaslUtil.java   |  2 ++
 .../main/java/org/apache/hadoop/hbase/util/PoolMap.java   |  2 +-
 6 files changed, 17 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d11b5d1f/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 17ca37a..b7e8177 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1176,13 +1176,14 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     boolean hasConfigKeys = false;
 
     // print all reserved keys first
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (!RESERVED_KEYWORDS.contains(k)) {
         hasConfigKeys = true;
         continue;
       }
       String key = Bytes.toString(k.get());
-      String value = Bytes.toStringBinary(values.get(k).get());
+      String value = Bytes.toStringBinary(entry.getValue().get());
       if (printDefaults
           || !DEFAULT_VALUES.containsKey(key)
           || !DEFAULT_VALUES.get(key).equalsIgnoreCase(value)) {
@@ -1199,12 +1200,13 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
       s.append(HConstants.METADATA).append(" => ");
       s.append('{');
       boolean printComma = false;
-      for (ImmutableBytesWritable k : values.keySet()) {
+      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+        ImmutableBytesWritable k = entry.getKey();
         if (RESERVED_KEYWORDS.contains(k)) {
           continue;
         }
         String key = Bytes.toString(k.get());
-        String value = Bytes.toStringBinary(values.get(k).get());
+        String value = Bytes.toStringBinary(entry.getValue().get());
         if (printComma) {
           s.append(", ");
         }
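The recurring change in this commit fixes a findbugs warning (WMI: inefficient keySet iterator): iterating keySet() and calling get(key) per element does a redundant map lookup, while entrySet() yields key and value in one pass. A minimal before/after sketch:

    import java.util.Map;
    import java.util.TreeMap;

    public class EntrySetDemo {
      public static void main(String[] args) {
        Map<String, String> values = new TreeMap<>();
        values.put("BLOCKSIZE", "65536");

        // before: one extra lookup per key, flagged by findbugs
        for (String k : values.keySet()) {
          System.out.println(k + " => " + values.get(k));
        }

        // after: key and value come from the same entry, no extra lookup
        for (Map.Entry<String, String> entry : values.entrySet()) {
          System.out.println(entry.getKey() + " => " + entry.getValue());
        }
      }
    }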

http://git-wip-us.apache.org/repos/asf/hbase/blob/d11b5d1f/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index b1e62f0..0eb0d20 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -949,7 +949,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     // step 1: set partitioning and pruning
     Set<ImmutableBytesWritable> reservedKeys = new TreeSet<ImmutableBytesWritable>();
     Set<ImmutableBytesWritable> userKeys = new TreeSet<ImmutableBytesWritable>();
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (k == null || k.get() == null) continue;
       String key = Bytes.toString(k.get());
       // in this section, print out reserved keywords + coprocessor info
@@ -958,7 +959,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
         continue;
       }
       // only print out IS_ROOT/IS_META if true
-      String value = Bytes.toString(values.get(k).get());
+      String value = Bytes.toString(entry.getValue().get());
       if (key.equalsIgnoreCase(IS_ROOT) || key.equalsIgnoreCase(IS_META)) {
         if (Boolean.valueOf(value) == false) continue;
       }
@@ -1166,8 +1167,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
       result = families.size() - other.families.size();
     }
     if (result == 0 && families.size() != other.families.size()) {
-  result = Integer.valueOf(families.si

hbase git commit: HBASE-18268 Eliminate the findbugs warnings for hbase-client

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 e0b858e72 -> a84de3312


HBASE-18268 Eliminate the findbugs warnings for hbase-client


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a84de331
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a84de331
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a84de331

Branch: refs/heads/branch-1.3
Commit: a84de33129054685c3b6c138bae79ea0e52f9479
Parents: e0b858e
Author: Chia-Ping Tsai 
Authored: Wed Jul 12 03:16:58 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 03:16:58 2017 +0800

--
 .../java/org/apache/hadoop/hbase/HColumnDescriptor.java   | 10 ++
 .../java/org/apache/hadoop/hbase/HTableDescriptor.java|  8 
 .../java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java |  2 ++
 .../java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java   |  2 ++
 .../java/org/apache/hadoop/hbase/security/SaslUtil.java   |  2 ++
 .../main/java/org/apache/hadoop/hbase/util/PoolMap.java   |  2 +-
 6 files changed, 17 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a84de331/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 32882c9..64366d4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1176,13 +1176,14 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     boolean hasConfigKeys = false;
 
     // print all reserved keys first
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (!RESERVED_KEYWORDS.contains(k)) {
         hasConfigKeys = true;
         continue;
       }
       String key = Bytes.toString(k.get());
-      String value = Bytes.toStringBinary(values.get(k).get());
+      String value = Bytes.toStringBinary(entry.getValue().get());
       if (printDefaults
           || !DEFAULT_VALUES.containsKey(key)
           || !DEFAULT_VALUES.get(key).equalsIgnoreCase(value)) {
@@ -1199,12 +1200,13 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
       s.append(HConstants.METADATA).append(" => ");
       s.append('{');
       boolean printComma = false;
-      for (ImmutableBytesWritable k : values.keySet()) {
+      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+        ImmutableBytesWritable k = entry.getKey();
         if (RESERVED_KEYWORDS.contains(k)) {
           continue;
         }
         String key = Bytes.toString(k.get());
-        String value = Bytes.toStringBinary(values.get(k).get());
+        String value = Bytes.toStringBinary(entry.getValue().get());
         if (printComma) {
           s.append(", ");
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a84de331/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 31a880a..2381a26 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -957,7 +957,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     // step 1: set partitioning and pruning
     Set<ImmutableBytesWritable> reservedKeys = new TreeSet<ImmutableBytesWritable>();
     Set<ImmutableBytesWritable> userKeys = new TreeSet<ImmutableBytesWritable>();
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (k == null || k.get() == null) continue;
       String key = Bytes.toString(k.get());
       // in this section, print out reserved keywords + coprocessor info
@@ -966,7 +967,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
         continue;
       }
       // only print out IS_ROOT/IS_META if true
-      String value = Bytes.toString(values.get(k).get());
+      String value = Bytes.toString(entry.getValue().get());
       if (key.equalsIgnoreCase(IS_ROOT) || key.equalsIgnoreCase(IS_META)) {
         if (Boolean.valueOf(value) == false) continue;
       }
@@ -1174,8 +1175,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
       result = families.size() - other.families.size();
     }
     if (result == 0 && families.size() != other.families.size()) {
-  result = Integer.valueOf(families.si

hbase git commit: HBASE-18268 Eliminate the findbugs warnings for hbase-client

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 2e6caa91e -> 4ad8ae3db


HBASE-18268 Eliminate the findbugs warnings for hbase-client


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4ad8ae3d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4ad8ae3d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4ad8ae3d

Branch: refs/heads/branch-1.4
Commit: 4ad8ae3db531c6aa70f868a1234d30188b53ba25
Parents: 2e6caa9
Author: Chia-Ping Tsai 
Authored: Tue Jun 27 11:34:31 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 03:16:05 2017 +0800

--
 .../java/org/apache/hadoop/hbase/HColumnDescriptor.java   | 10 ++
 .../java/org/apache/hadoop/hbase/HTableDescriptor.java|  8 
 .../org/apache/hadoop/hbase/ipc/AbstractRpcClient.java|  2 ++
 .../main/java/org/apache/hadoop/hbase/util/PoolMap.java   |  2 +-
 4 files changed, 13 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4ad8ae3d/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 94899e3..9292633 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1177,13 +1177,14 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     boolean hasConfigKeys = false;
 
     // print all reserved keys first
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (!RESERVED_KEYWORDS.contains(k)) {
         hasConfigKeys = true;
         continue;
       }
       String key = Bytes.toString(k.get());
-      String value = Bytes.toStringBinary(values.get(k).get());
+      String value = Bytes.toStringBinary(entry.getValue().get());
       if (printDefaults
           || !DEFAULT_VALUES.containsKey(key)
           || !DEFAULT_VALUES.get(key).equalsIgnoreCase(value)) {
@@ -1200,12 +1201,13 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
       s.append(HConstants.METADATA).append(" => ");
       s.append('{');
       boolean printComma = false;
-      for (ImmutableBytesWritable k : values.keySet()) {
+      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+        ImmutableBytesWritable k = entry.getKey();
         if (RESERVED_KEYWORDS.contains(k)) {
           continue;
         }
         String key = Bytes.toString(k.get());
-        String value = Bytes.toStringBinary(values.get(k).get());
+        String value = Bytes.toStringBinary(entry.getValue().get());
         if (printComma) {
           s.append(", ");
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4ad8ae3d/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index ba0c126..fbb9376 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -957,7 +957,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     // step 1: set partitioning and pruning
     Set<ImmutableBytesWritable> reservedKeys = new TreeSet<ImmutableBytesWritable>();
     Set<ImmutableBytesWritable> userKeys = new TreeSet<ImmutableBytesWritable>();
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (k == null || k.get() == null) continue;
       String key = Bytes.toString(k.get());
       // in this section, print out reserved keywords + coprocessor info
@@ -966,7 +967,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
         continue;
       }
       // only print out IS_ROOT/IS_META if true
-      String value = Bytes.toString(values.get(k).get());
+      String value = Bytes.toString(entry.getValue().get());
       if (key.equalsIgnoreCase(IS_ROOT) || key.equalsIgnoreCase(IS_META)) {
         if (Boolean.valueOf(value) == false) continue;
       }
@@ -1175,8 +1176,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
       result = families.size() - other.families.size();
     }
     if (result == 0 && families.size() != other.families.size()) {
-      result = Integer.valueOf(families.size()).compareTo(
-          Integer.valueOf(other.families.size()));
+      result = Integer.compare(families.size(), other.families.size());
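The last hunk swaps Integer.valueOf(a).compareTo(Integer.valueOf(b)) for Integer.compare(a, b), which avoids boxing two ints just to compare them; findbugs-style checkers flag the former as needless boxing. A one-liner to see they agree:

    public class CompareDemo {
      public static void main(String[] args) {
        int a = 3, b = 5;
        // identical ordering, no Integer allocation
        System.out.println(Integer.valueOf(a).compareTo(Integer.valueOf(b))); // -1
        System.out.println(Integer.compare(a, b));                            // -1
      }
    }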

hbase git commit: HBASE-18268 Eliminate the findbugs warnings for hbase-client

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1 bec34ae43 -> 8eaad6786


HBASE-18268 Eliminate the findbugs warnings for hbase-client


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8eaad678
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8eaad678
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8eaad678

Branch: refs/heads/branch-1
Commit: 8eaad678662d4c26ea90cdfe7a1d89c832da354c
Parents: bec34ae
Author: Chia-Ping Tsai 
Authored: Tue Jun 27 11:34:31 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 03:15:08 2017 +0800

--
 .../java/org/apache/hadoop/hbase/HColumnDescriptor.java   | 10 ++
 .../java/org/apache/hadoop/hbase/HTableDescriptor.java|  8 
 .../org/apache/hadoop/hbase/ipc/AbstractRpcClient.java|  2 ++
 .../main/java/org/apache/hadoop/hbase/util/PoolMap.java   |  2 +-
 4 files changed, 13 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8eaad678/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 94899e3..9292633 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1177,13 +1177,14 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     boolean hasConfigKeys = false;
 
     // print all reserved keys first
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (!RESERVED_KEYWORDS.contains(k)) {
         hasConfigKeys = true;
         continue;
       }
       String key = Bytes.toString(k.get());
-      String value = Bytes.toStringBinary(values.get(k).get());
+      String value = Bytes.toStringBinary(entry.getValue().get());
       if (printDefaults
           || !DEFAULT_VALUES.containsKey(key)
           || !DEFAULT_VALUES.get(key).equalsIgnoreCase(value)) {
@@ -1200,12 +1201,13 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
       s.append(HConstants.METADATA).append(" => ");
       s.append('{');
       boolean printComma = false;
-      for (ImmutableBytesWritable k : values.keySet()) {
+      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+        ImmutableBytesWritable k = entry.getKey();
         if (RESERVED_KEYWORDS.contains(k)) {
           continue;
         }
         String key = Bytes.toString(k.get());
-        String value = Bytes.toStringBinary(values.get(k).get());
+        String value = Bytes.toStringBinary(entry.getValue().get());
         if (printComma) {
           s.append(", ");
         }

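Both hunks above are the standard cure for findbugs' WMI_WRONG_MAP_ITERATOR warning: iterating keySet() and then calling values.get(k) performs a second map lookup per key, while iterating entrySet() hands back the key and value together. A minimal, self-contained sketch of the two styles (the map contents are made up for illustration):

  import java.util.Map;
  import java.util.TreeMap;

  public class EntrySetSketch {
    public static void main(String[] args) {
      Map<String, String> values = new TreeMap<>();
      values.put("BLOCKSIZE", "65536");
      values.put("TTL", "FOREVER");

      // keySet() iteration: one extra lookup per key, which findbugs flags.
      for (String k : values.keySet()) {
        System.out.println(k + " => " + values.get(k));
      }

      // entrySet() iteration: key and value arrive together, no second lookup.
      for (Map.Entry<String, String> entry : values.entrySet()) {
        System.out.println(entry.getKey() + " => " + entry.getValue());
      }
    }
  }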
http://git-wip-us.apache.org/repos/asf/hbase/blob/8eaad678/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index ba0c126..fbb9376 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -957,7 +957,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     // step 1: set partitioning and pruning
     Set<ImmutableBytesWritable> reservedKeys = new TreeSet<ImmutableBytesWritable>();
     Set<ImmutableBytesWritable> userKeys = new TreeSet<ImmutableBytesWritable>();
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (k == null || k.get() == null) continue;
       String key = Bytes.toString(k.get());
       // in this section, print out reserved keywords + coprocessor info
@@ -966,7 +967,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
         continue;
       }
       // only print out IS_ROOT/IS_META if true
-      String value = Bytes.toString(values.get(k).get());
+      String value = Bytes.toString(entry.getValue().get());
       if (key.equalsIgnoreCase(IS_ROOT) || key.equalsIgnoreCase(IS_META)) {
         if (Boolean.valueOf(value) == false) continue;
       }
@@ -1175,8 +1176,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
       result = families.size() - other.families.size();
     }
     if (result == 0 && families.size() != other.families.size()) {
-      result = Integer.valueOf(families.size()).compareTo(
-          Integer.valueOf(other.families.size()));
+      result = Integer.compare(families.size(), other.families.size());

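The last hunk swaps Integer.valueOf(a).compareTo(Integer.valueOf(b)) for Integer.compare(a, b): the same negative/zero/positive contract, minus the boxed Integer allocations. A small sketch:

  public class CompareSketch {
    public static void main(String[] args) {
      int a = 3, b = 7;
      // Boxes both ints before comparing; works, but allocates needlessly.
      int viaBoxing = Integer.valueOf(a).compareTo(Integer.valueOf(b));
      // Same contract, no allocation, and none of the overflow risk of plain subtraction.
      int direct = Integer.compare(a, b);
      System.out.println(viaBoxing + " " + direct); // both print -1
    }
  }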
hbase git commit: HBASE-18268 Eliminate the findbugs warnings for hbase-client

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 4c699fd82 -> ca5d8c40b


HBASE-18268 Eliminate the findbugs warnings for hbase-client


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca5d8c40
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca5d8c40
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca5d8c40

Branch: refs/heads/branch-2
Commit: ca5d8c40b7e39260f919cb6205acc2a875a1f118
Parents: 4c699fd
Author: Chia-Ping Tsai 
Authored: Tue Jun 27 10:17:41 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 03:08:05 2017 +0800

--
 .../hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java  | 5 +++--
 .../java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java | 2 ++
 .../org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java   | 1 -
 .../src/main/java/org/apache/hadoop/hbase/util/PoolMap.java | 2 +-
 4 files changed, 6 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca5d8c40/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
index bb302db..20706c6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
@@ -1091,12 +1091,13 @@ public class ColumnFamilyDescriptorBuilder {
       s.append(HConstants.METADATA).append(" => ");
       s.append('{');
       boolean printComma = false;
-      for (Bytes k : values.keySet()) {
+      for (Map.Entry<Bytes, Bytes> entry : values.entrySet()) {
+        Bytes k = entry.getKey();
         if (RESERVED_KEYWORDS.contains(k)) {
           continue;
         }
         String key = Bytes.toString(k.get());
-        String value = Bytes.toStringBinary(values.get(k).get());
+        String value = Bytes.toStringBinary(entry.getValue().get());
         if (printComma) {
           s.append(", ");
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ca5d8c40/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index d414f70..2acddda 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -101,6 +101,8 @@ public abstract class AbstractRpcClient implements RpcClient
   private static final ScheduledExecutorService IDLE_CONN_SWEEPER = Executors
       .newScheduledThreadPool(1, Threads.newDaemonThreadFactory("Idle-Rpc-Conn-Sweeper"));

+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="MS_MUTABLE_COLLECTION_PKGPROTECT",
+      justification="the rest of the system which live in the different package can use")
   protected final static Map<Kind, TokenSelector<? extends TokenIdentifier>> TOKEN_HANDLERS = new HashMap<>();

   static {

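MS_MUTABLE_COLLECTION_PKGPROTECT is findbugs warning that a static collection reachable from other packages can be mutated from outside the class. Here the mutability is deliberate (other parts of the system register token handlers in the map), so the commit documents that intent with a suppression annotation instead of changing the field. A sketch of the pattern on a hypothetical field; compiling it needs the findbugs annotations artifact on the classpath:

  import java.util.HashMap;
  import java.util.Map;

  public class Registry {
    @edu.umd.cs.findbugs.annotations.SuppressWarnings(
        value = "MS_MUTABLE_COLLECTION_PKGPROTECT",
        justification = "intentionally mutable: other packages register handlers here")
    protected static final Map<String, Runnable> HANDLERS = new HashMap<>();
  }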
http://git-wip-us.apache.org/repos/asf/hbase/blob/ca5d8c40/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index eebe4bd..b1b52b1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -3211,7 +3211,6 @@ public final class ProtobufUtil {
     for (ServerName serverName : status.getServers()) {
       LiveServerInfo.Builder lsi =
           LiveServerInfo.newBuilder().setServer(ProtobufUtil.toServerName(serverName));
-      status.getLoad(serverName);
       lsi.setServerLoad(status.getLoad(serverName).obtainServerLoadPB());
       builder.addLiveServers(lsi.build());
     }

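The ProtobufUtil hunk deletes a call whose return value was discarded immediately before the identical call whose result is actually used. A tiny runnable sketch of that smell, with hypothetical names:

  import java.util.HashMap;
  import java.util.Map;

  public class DiscardedCall {
    public static void main(String[] args) {
      Map<String, Integer> load = new HashMap<>();
      load.put("server-a", 42);
      load.get("server-a");         // result thrown away: pure noise, drop it
      int v = load.get("server-a"); // the call that matters
      System.out.println(v);
    }
  }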
http://git-wip-us.apache.org/repos/asf/hbase/blob/ca5d8c40/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
index 2131db3..40c6b55 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java

hbase git commit: HBASE-18268 Eliminate the findbugs warnings for hbase-client

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master d215cb495 -> cc4301ca0


HBASE-18268 Eliminate the findbugs warnings for hbase-client


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cc4301ca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cc4301ca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cc4301ca

Branch: refs/heads/master
Commit: cc4301ca0842d9c3f66dfa39c5627744f5f91a8c
Parents: d215cb4
Author: Chia-Ping Tsai 
Authored: Tue Jun 27 10:17:41 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 03:07:17 2017 +0800

--
 .../hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java  | 5 +++--
 .../java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java | 2 ++
 .../org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java   | 1 -
 .../src/main/java/org/apache/hadoop/hbase/util/PoolMap.java | 2 +-
 4 files changed, 6 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cc4301ca/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
index bb302db..20706c6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
@@ -1091,12 +1091,13 @@ public class ColumnFamilyDescriptorBuilder {
       s.append(HConstants.METADATA).append(" => ");
       s.append('{');
       boolean printComma = false;
-      for (Bytes k : values.keySet()) {
+      for (Map.Entry<Bytes, Bytes> entry : values.entrySet()) {
+        Bytes k = entry.getKey();
         if (RESERVED_KEYWORDS.contains(k)) {
           continue;
         }
         String key = Bytes.toString(k.get());
-        String value = Bytes.toStringBinary(values.get(k).get());
+        String value = Bytes.toStringBinary(entry.getValue().get());
         if (printComma) {
           s.append(", ");
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cc4301ca/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index d414f70..2acddda 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -101,6 +101,8 @@ public abstract class AbstractRpcClient implements RpcClient
   private static final ScheduledExecutorService IDLE_CONN_SWEEPER = Executors
       .newScheduledThreadPool(1, Threads.newDaemonThreadFactory("Idle-Rpc-Conn-Sweeper"));

+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="MS_MUTABLE_COLLECTION_PKGPROTECT",
+      justification="the rest of the system which live in the different package can use")
   protected final static Map<Kind, TokenSelector<? extends TokenIdentifier>> TOKEN_HANDLERS = new HashMap<>();

   static {

http://git-wip-us.apache.org/repos/asf/hbase/blob/cc4301ca/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index eebe4bd..b1b52b1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -3211,7 +3211,6 @@ public final class ProtobufUtil {
     for (ServerName serverName : status.getServers()) {
       LiveServerInfo.Builder lsi =
           LiveServerInfo.newBuilder().setServer(ProtobufUtil.toServerName(serverName));
-      status.getLoad(serverName);
       lsi.setServerLoad(status.getLoad(serverName).obtainServerLoadPB());
       builder.addLiveServers(lsi.build());
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cc4301ca/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
index 2131db3..40c6b55 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java

hbase git commit: HBASE-18295 The result contains the cells across different rows

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 0b31a7300 -> e0b858e72


HBASE-18295 The result contains the cells across different rows


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e0b858e7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e0b858e7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e0b858e7

Branch: refs/heads/branch-1.3
Commit: e0b858e72f674f6f9d3a70a2917dca9233e27eb4
Parents: 0b31a73
Author: Chia-Ping Tsai 
Authored: Wed Jul 12 02:51:00 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 02:51:00 2017 +0800

--
 .../hadoop/hbase/regionserver/StoreScanner.java |  38 +++-
 .../hadoop/hbase/regionserver/TestStore.java| 182 +--
 2 files changed, 203 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e0b858e7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index de66d4e..d42852a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -139,7 +139,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
   private ReentrantLock flushLock = new ReentrantLock();

   private final long readPt;
-
+  private boolean topChanged = false;
   // used by the injection framework to test race between StoreScanner construction and compaction
   enum StoreScannerCompactionRace {
     BEFORE_SEEK,
@@ -531,7 +531,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       if (prevCell != cell) ++kvsScanned; // Do object compare - we set prevKV from the same heap.
       checkScanOrder(prevCell, cell, comparator);
       prevCell = cell;
-
+      topChanged = false;
       ScanQueryMatcher.MatchCode qcode = matcher.match(cell);
       qcode = optimize(qcode, cell);
       switch(qcode) {
@@ -630,10 +630,18 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           // another compareRow to say the current row is DONE
           matcher.row = null;
           seekToNextRow(cell);
+          NextState stateAfterSeekNextRow = needToReturn(outResult);
+          if (stateAfterSeekNextRow != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextRow).hasMoreValues();
+          }
           break;

         case SEEK_NEXT_COL:
           seekAsDirection(matcher.getKeyForNextColumn(cell));
+          NextState stateAfterSeekNextColumn = needToReturn(outResult);
+          if (stateAfterSeekNextColumn != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextColumn).hasMoreValues();
+          }
           break;

         case SKIP:
@@ -645,6 +653,10 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           Cell nextKV = matcher.getNextKeyHint(cell);
           if (nextKV != null) {
             seekAsDirection(nextKV);
+            NextState stateAfterSeekByHint = needToReturn(outResult);
+            if (stateAfterSeekByHint != null) {
+              return scannerContext.setScannerState(stateAfterSeekByHint).hasMoreValues();
+            }
           } else {
             heap.next();
           }
@@ -665,6 +677,24 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
   }

   /**
+   * If the top cell won't be flushed into disk, the new top cell may be
+   * changed after #reopenAfterFlush. Because the older top cell only exists
+   * in the memstore scanner but the memstore scanner is replaced by the hfile
+   * scanner after #reopenAfterFlush. If the row of the top cell is changed,
+   * we should return the current cells. Otherwise, we may return
+   * cells across different rows.
+   * @param outResult the cells which are visible for user scan
+   * @return null if the top cell doesn't change. Otherwise, the NextState
+   * to return
+   */
+  private NextState needToReturn(List<Cell> outResult) {
+    if (!outResult.isEmpty() && topChanged) {
+      return heap.peek() == null ? NextState.NO_MORE_VALUES : NextState.MORE_VALUES;
+    }
+    return null;
+  }
+
+  /**
    * See if we should actually SEEK or rather just SKIP to the next Cell (see HBASE-13109).
    * This method works together with ColumnTrackers and Filters. ColumnTrackers may issue SEEK
    * hints, such as seek to next column, next row, or seek to an arbitrary seek key.
@@ -817,14 +847,16 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       resetScannerStack(this.lastTop);

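The same fix lands on branch-1.4, branch-1, branch-2 and master in the commits that follow. The idea: any seek may trigger a post-flush reopen that swaps the memstore scanner for an hfile scanner, so after each seek the scanner checks whether the heap's top row moved while cells were already buffered, and if so returns the buffered row instead of letting one next() call span two rows. A self-contained sketch of just that guard, using hypothetical stand-in types rather than the real HBase classes:

  import java.util.ArrayList;
  import java.util.List;
  import java.util.PriorityQueue;

  public class NeedToReturnSketch {
    enum NextState { MORE_VALUES, NO_MORE_VALUES }

    static class Cell {
      final String row;
      final String qualifier;
      Cell(String row, String qualifier) { this.row = row; this.qualifier = qualifier; }
    }

    // Remaining cells, ordered by (row, qualifier), standing in for the KeyValueHeap.
    private final PriorityQueue<Cell> heap = new PriorityQueue<>((a, b) ->
        a.row.equals(b.row) ? a.qualifier.compareTo(b.qualifier) : a.row.compareTo(b.row));

    // Flipped by the simulated post-flush reopen when the heap's top row moves.
    private boolean topChanged = false;

    // Mirrors the patch: with cells already buffered and the top row moved,
    // stop now instead of mixing two rows into one result.
    NextState needToReturn(List<Cell> outResult) {
      if (!outResult.isEmpty() && topChanged) {
        return heap.peek() == null ? NextState.NO_MORE_VALUES : NextState.MORE_VALUES;
      }
      return null; // top row unchanged: keep scanning
    }

    public static void main(String[] args) {
      NeedToReturnSketch s = new NeedToReturnSketch();
      List<Cell> buffered = new ArrayList<>();
      buffered.add(new Cell("row1", "a")); // row1 cells already collected
      s.heap.add(new Cell("row2", "a"));   // after the reopen, the top sits on row2
      s.topChanged = true;
      System.out.println(s.needToReturn(buffered)); // MORE_VALUES: emit row1 now
    }
  }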
hbase git commit: HBASE-18295 The result contains the cells across different rows

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 4d88c460f -> 2e6caa91e


HBASE-18295 The result contains the cells across different rows


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2e6caa91
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2e6caa91
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2e6caa91

Branch: refs/heads/branch-1.4
Commit: 2e6caa91ea150589b50c7650d5d38fdfacc61e23
Parents: 4d88c46
Author: Chia-Ping Tsai 
Authored: Wed Jul 12 02:49:16 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 02:49:16 2017 +0800

--
 .../hadoop/hbase/regionserver/StoreScanner.java |  37 +++-
 .../hadoop/hbase/regionserver/TestStore.java| 179 +--
 2 files changed, 204 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2e6caa91/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index d3a1e49..7c34b87 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -142,6 +142,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
   private ReentrantLock flushLock = new ReentrantLock();

   private final long readPt;
+  private boolean topChanged = false;

   // used by the injection framework to test race between StoreScanner construction and compaction
   enum StoreScannerCompactionRace {
@@ -547,7 +548,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       if (prevCell != cell) ++kvsScanned; // Do object compare - we set prevKV from the same heap.
       checkScanOrder(prevCell, cell, comparator);
       prevCell = cell;
-
+      topChanged = false;
       ScanQueryMatcher.MatchCode qcode = matcher.match(cell);
       switch (qcode) {
         case INCLUDE:
@@ -644,10 +645,18 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           // another compareRow to say the current row is DONE
           matcher.clearCurrentRow();
           seekOrSkipToNextRow(cell);
+          NextState stateAfterSeekNextRow = needToReturn(outResult);
+          if (stateAfterSeekNextRow != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextRow).hasMoreValues();
+          }
           break;

         case SEEK_NEXT_COL:
           seekOrSkipToNextColumn(cell);
+          NextState stateAfterSeekNextColumn = needToReturn(outResult);
+          if (stateAfterSeekNextColumn != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextColumn).hasMoreValues();
+          }
           break;

         case SKIP:
@@ -658,6 +667,10 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           Cell nextKV = matcher.getNextKeyHint(cell);
           if (nextKV != null) {
             seekAsDirection(nextKV);
+            NextState stateAfterSeekByHint = needToReturn(outResult);
+            if (stateAfterSeekByHint != null) {
+              return scannerContext.setScannerState(stateAfterSeekByHint).hasMoreValues();
+            }
           } else {
             heap.next();
           }
@@ -677,6 +690,24 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
     return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
   }

+  /**
+   * If the top cell won't be flushed into disk, the new top cell may be
+   * changed after #reopenAfterFlush. Because the older top cell only exists
+   * in the memstore scanner but the memstore scanner is replaced by the hfile
+   * scanner after #reopenAfterFlush. If the row of the top cell is changed,
+   * we should return the current cells. Otherwise, we may return
+   * cells across different rows.
+   * @param outResult the cells which are visible for user scan
+   * @return null if the top cell doesn't change. Otherwise, the NextState
+   * to return
+   */
+  private NextState needToReturn(List<Cell> outResult) {
+    if (!outResult.isEmpty() && topChanged) {
+      return heap.peek() == null ? NextState.NO_MORE_VALUES : NextState.MORE_VALUES;
+    }
+    return null;
+  }
+
   private void seekOrSkipToNextRow(Cell cell) throws IOException {
     // If it is a Get Scan, then we know that we are done with this row; there are no more
     // rows beyond the current one: don't try to optimize.
@@ -846,14 +877,16 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       resetScannerStack(this.lastTop);

hbase git commit: HBASE-18295 The result contains the cells across different rows

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1 44651e52d -> bec34ae43


HBASE-18295 The result contains the cells across different rows


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bec34ae4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bec34ae4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bec34ae4

Branch: refs/heads/branch-1
Commit: bec34ae432920b37821a313d86a37a0c960aa9dd
Parents: 44651e5
Author: Chia-Ping Tsai 
Authored: Wed Jul 12 02:46:13 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 02:46:13 2017 +0800

--
 .../hadoop/hbase/regionserver/StoreScanner.java |  37 +++-
 .../hadoop/hbase/regionserver/TestStore.java| 179 +--
 2 files changed, 204 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bec34ae4/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index d3a1e49..7c34b87 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -142,6 +142,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
   private ReentrantLock flushLock = new ReentrantLock();

   private final long readPt;
+  private boolean topChanged = false;

   // used by the injection framework to test race between StoreScanner construction and compaction
   enum StoreScannerCompactionRace {
@@ -547,7 +548,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       if (prevCell != cell) ++kvsScanned; // Do object compare - we set prevKV from the same heap.
       checkScanOrder(prevCell, cell, comparator);
       prevCell = cell;
-
+      topChanged = false;
       ScanQueryMatcher.MatchCode qcode = matcher.match(cell);
       switch (qcode) {
         case INCLUDE:
@@ -644,10 +645,18 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           // another compareRow to say the current row is DONE
           matcher.clearCurrentRow();
           seekOrSkipToNextRow(cell);
+          NextState stateAfterSeekNextRow = needToReturn(outResult);
+          if (stateAfterSeekNextRow != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextRow).hasMoreValues();
+          }
           break;

         case SEEK_NEXT_COL:
           seekOrSkipToNextColumn(cell);
+          NextState stateAfterSeekNextColumn = needToReturn(outResult);
+          if (stateAfterSeekNextColumn != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextColumn).hasMoreValues();
+          }
           break;

         case SKIP:
@@ -658,6 +667,10 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           Cell nextKV = matcher.getNextKeyHint(cell);
           if (nextKV != null) {
             seekAsDirection(nextKV);
+            NextState stateAfterSeekByHint = needToReturn(outResult);
+            if (stateAfterSeekByHint != null) {
+              return scannerContext.setScannerState(stateAfterSeekByHint).hasMoreValues();
+            }
           } else {
             heap.next();
           }
@@ -677,6 +690,24 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
     return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
   }

+  /**
+   * If the top cell won't be flushed into disk, the new top cell may be
+   * changed after #reopenAfterFlush. Because the older top cell only exists
+   * in the memstore scanner but the memstore scanner is replaced by the hfile
+   * scanner after #reopenAfterFlush. If the row of the top cell is changed,
+   * we should return the current cells. Otherwise, we may return
+   * cells across different rows.
+   * @param outResult the cells which are visible for user scan
+   * @return null if the top cell doesn't change. Otherwise, the NextState
+   * to return
+   */
+  private NextState needToReturn(List<Cell> outResult) {
+    if (!outResult.isEmpty() && topChanged) {
+      return heap.peek() == null ? NextState.NO_MORE_VALUES : NextState.MORE_VALUES;
+    }
+    return null;
+  }
+
   private void seekOrSkipToNextRow(Cell cell) throws IOException {
     // If it is a Get Scan, then we know that we are done with this row; there are no more
     // rows beyond the current one: don't try to optimize.
@@ -846,14 +877,16 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       resetScannerStack(this.lastTop);

hbase git commit: HBASE-18295 The result contains the cells across different rows

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 8a8e299ee -> 4c699fd82


HBASE-18295 The result contains the cells across different rows


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4c699fd8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4c699fd8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4c699fd8

Branch: refs/heads/branch-2
Commit: 4c699fd82154ae8c88ee01cf512be342eae60299
Parents: 8a8e299
Author: Chia-Ping Tsai 
Authored: Wed Jul 12 02:34:01 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 02:34:01 2017 +0800

--
 .../hadoop/hbase/regionserver/StoreScanner.java |  46 +++-
 .../hadoop/hbase/regionserver/TestStore.java| 228 ---
 2 files changed, 232 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4c699fd8/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 11301d8..1fcb314 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -22,9 +22,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.NavigableSet;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.locks.ReentrantLock;
@@ -158,6 +156,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
   private final ReentrantLock flushLock = new ReentrantLock();

   protected final long readPt;
+  private boolean topChanged = false;

   // used by the injection framework to test race between StoreScanner construction and compaction
   enum StoreScannerCompactionRace {
@@ -606,6 +605,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       int cellSize = CellUtil.estimatedSerializedSizeOf(cell);
       bytesRead += cellSize;
       prevCell = cell;
+      topChanged = false;
       ScanQueryMatcher.MatchCode qcode = matcher.match(cell);
       switch (qcode) {
         case INCLUDE:
@@ -692,10 +692,18 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           }
           matcher.clearCurrentRow();
           seekOrSkipToNextRow(cell);
+          NextState stateAfterSeekNextRow = needToReturn(outResult);
+          if (stateAfterSeekNextRow != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextRow).hasMoreValues();
+          }
           break;

         case SEEK_NEXT_COL:
           seekOrSkipToNextColumn(cell);
+          NextState stateAfterSeekNextColumn = needToReturn(outResult);
+          if (stateAfterSeekNextColumn != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextColumn).hasMoreValues();
+          }
           break;

         case SKIP:
@@ -706,6 +714,10 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           Cell nextKV = matcher.getNextKeyHint(cell);
           if (nextKV != null) {
             seekAsDirection(nextKV);
+            NextState stateAfterSeekByHint = needToReturn(outResult);
+            if (stateAfterSeekByHint != null) {
+              return scannerContext.setScannerState(stateAfterSeekByHint).hasMoreValues();
+            }
           } else {
             heap.next();
           }
@@ -725,6 +737,24 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
     return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
   }

+  /**
+   * If the top cell won't be flushed into disk, the new top cell may be
+   * changed after #reopenAfterFlush. Because the older top cell only exists
+   * in the memstore scanner but the memstore scanner is replaced by the hfile
+   * scanner after #reopenAfterFlush. If the row of the top cell is changed,
+   * we should return the current cells. Otherwise, we may return
+   * cells across different rows.
+   * @param outResult the cells which are visible for user scan
+   * @return null if the top cell doesn't change. Otherwise, the NextState
+   * to return
+   */
+  private NextState needToReturn(List<Cell> outResult) {
+    if (!outResult.isEmpty() && topChanged) {
+      return heap.peek() == null ? NextState.NO_MORE_VALUES : NextState.MORE_VALUES;
+    }
+    return null;
+  }
+
   private void seekOrSkipToNextRow(Cell cell) throws IOException {
     // If it is a Get Scan, then we know that we are done with this row; there are no more
     // rows beyond the current one: don't try to optimize.

hbase git commit: HBASE-18295 The result contains the cells across different rows

2017-07-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 1978b78cd -> d215cb495


HBASE-18295 The result contains the cells across different rows


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d215cb49
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d215cb49
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d215cb49

Branch: refs/heads/master
Commit: d215cb495085c5b7e5b27ce2ffdb6c4b4a90b95f
Parents: 1978b78
Author: Chia-Ping Tsai 
Authored: Wed Jul 12 02:27:29 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Jul 12 02:27:29 2017 +0800

--
 .../hadoop/hbase/regionserver/StoreScanner.java |  46 +++-
 .../hadoop/hbase/regionserver/TestStore.java| 228 ---
 2 files changed, 232 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d215cb49/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 11301d8..1fcb314 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -22,9 +22,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.NavigableSet;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.locks.ReentrantLock;
@@ -158,6 +156,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
   private final ReentrantLock flushLock = new ReentrantLock();

   protected final long readPt;
+  private boolean topChanged = false;

   // used by the injection framework to test race between StoreScanner construction and compaction
   enum StoreScannerCompactionRace {
@@ -606,6 +605,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
       int cellSize = CellUtil.estimatedSerializedSizeOf(cell);
       bytesRead += cellSize;
       prevCell = cell;
+      topChanged = false;
       ScanQueryMatcher.MatchCode qcode = matcher.match(cell);
       switch (qcode) {
         case INCLUDE:
@@ -692,10 +692,18 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           }
           matcher.clearCurrentRow();
           seekOrSkipToNextRow(cell);
+          NextState stateAfterSeekNextRow = needToReturn(outResult);
+          if (stateAfterSeekNextRow != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextRow).hasMoreValues();
+          }
           break;

         case SEEK_NEXT_COL:
           seekOrSkipToNextColumn(cell);
+          NextState stateAfterSeekNextColumn = needToReturn(outResult);
+          if (stateAfterSeekNextColumn != null) {
+            return scannerContext.setScannerState(stateAfterSeekNextColumn).hasMoreValues();
+          }
           break;

         case SKIP:
@@ -706,6 +714,10 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
           Cell nextKV = matcher.getNextKeyHint(cell);
           if (nextKV != null) {
             seekAsDirection(nextKV);
+            NextState stateAfterSeekByHint = needToReturn(outResult);
+            if (stateAfterSeekByHint != null) {
+              return scannerContext.setScannerState(stateAfterSeekByHint).hasMoreValues();
+            }
           } else {
             heap.next();
           }
@@ -725,6 +737,24 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
     return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
   }

+  /**
+   * If the top cell won't be flushed into disk, the new top cell may be
+   * changed after #reopenAfterFlush. Because the older top cell only exists
+   * in the memstore scanner but the memstore scanner is replaced by the hfile
+   * scanner after #reopenAfterFlush. If the row of the top cell is changed,
+   * we should return the current cells. Otherwise, we may return
+   * cells across different rows.
+   * @param outResult the cells which are visible for user scan
+   * @return null if the top cell doesn't change. Otherwise, the NextState
+   * to return
+   */
+  private NextState needToReturn(List<Cell> outResult) {
+    if (!outResult.isEmpty() && topChanged) {
+      return heap.peek() == null ? NextState.NO_MORE_VALUES : NextState.MORE_VALUES;
+    }
+    return null;
+  }
+
   private void seekOrSkipToNextRow(Cell cell) throws IOException {
     // If it is a Get Scan, then we know that we are done with this row; there are no more
     // rows beyond the current one: don't try to optimize.

[46/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 3f51398..ea269ef 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -289,7 +289,7 @@
 2244
 0
 0
-14778
+14786
 
 Files
 
@@ -762,7 +762,7 @@
 org/apache/hadoop/hbase/client/AsyncAdmin.java
 0
 0
-16
+19
 
 org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
 0
@@ -1057,7 +1057,7 @@
 org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 0
 0
-116
+117
 
 org/apache/hadoop/hbase/client/RawAsyncTable.java
 0
@@ -4412,7 +4412,7 @@
 org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 0
 0
-186
+190
 
 org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
 0
@@ -7364,7 +7364,7 @@
 caseIndent: "2"
 basicOffset: "2"
 lineWrappingIndentation: "2"
-5180
+5184
  Error
 
 javadoc
@@ -7376,7 +7376,7 @@
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription";>NonEmptyAtclauseDescription
-3247
+3250
  Error
 
 misc
@@ -7394,7 +7394,7 @@
 
 max: "100"
 ignorePattern: "^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated"
-970
+971
  Error
 
 
@@ -14962,55 +14962,55 @@
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.NamespaceDescriptor' import.
-32
+33
 
  Error
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.client.replication.TableCFs' 
import.
-37
+39
 
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 101).
-164
+167
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-174
+177
 
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 101).
-180
+183
 
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 106).
-183
+186
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-775
+412
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-819
+426
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-838
+812
 
  Error
 javadoc
@@ -15022,89 +15022,107 @@
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-857
+875
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-908
+904
+
+ Error
+javadoc
+NonEmptyAtclauseDescription
+At-clause should have a non-empty description.
+911
+
+ Error
+javadoc
+NonEmptyAtclauseDescription
+At-clause should have a non-empty description.
+912
 
  Error
+javadoc
+NonEmptyAtclauseDescription
+At-clause should have a non-empty description.
+969
+
+ Error
 sizes
 LineLength
 Line is longer than 100 characters (found 101).
-935
-
+996
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-963
-
+1024
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-984
+1045
 
 org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 38
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 39
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 46
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 47
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 55
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 56
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 65
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 76
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
@@ -15113,13 +15131,13 @@
 
 org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 imports
 ImportOrder
@@ -15128,25 +15146,25 @@
 
 org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 design
 VisibilityModifier
 Variable 'loc' must be private and have accessor methods.
 112
-
+
  Error
 design
 VisibilityModifier
 Variable 'actions' must be private and have accessor methods.
 114
-
+
  Error
 design
 VisibilityModifier
@@ -15155,37 +15173,37 @@
 
 org/apache/hadoop/hbase/client/AsyncClientScanner.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 imports
 AvoidStarImport
 Using the '.*' form of import should be avoided - 
org.apache.hadoop.hbase.cl

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
index 70b52cd..f5c93d7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract static class RegionCoprocessorHost.EndpointOperationWithResult
+private abstract static class RegionCoprocessorHost.EndpointOperationWithResult
 extends RegionCoprocessorHost.EndpointOperation
 
 
@@ -243,7 +243,7 @@ extends 
 
 result
-private T result
+private T result
 
 
 
@@ -260,7 +260,7 @@ extends 
 
 EndpointOperationWithResult
-private EndpointOperationWithResult()
+private EndpointOperationWithResult()
 
 
 
@@ -279,7 +279,7 @@ extends 
 
 setResult
-public void setResult(T result)
+public void setResult(T result)
 
 
 
@@ -288,7 +288,7 @@ extends 
 
 getResult
-public T getResult()
+public T getResult()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
index ca9d09d..d3c6557 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract static class RegionCoprocessorHost.RegionOperation
+private abstract static class RegionCoprocessorHost.RegionOperation
 extends RegionCoprocessorHost.CoprocessorOperation
 
 
@@ -223,7 +223,7 @@ extends 
 
 RegionOperation
-public RegionOperation()
+public RegionOperation()
 
 
 
@@ -232,7 +232,7 @@ extends 
 
 RegionOperation
-public RegionOperation(User user)
+public RegionOperation(User user)
 
 
 
@@ -249,7 +249,7 @@ extends 
 
 call
-public abstract void call(RegionObserver observer,
+public abstract void call(RegionObserver observer,
   ObserverContext ctx)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
@@ -264,7 +264,7 @@ extends 
 
 hasCall
-public boolean hasCall(Coprocessor observer)
+public boolean hasCall(Coprocessor observer)
 
 Specified by:
 hasCall in
 class RegionCoprocessorHost.CoprocessorOperation
@@ -277,7 +277,7 @@ extends 
 
 call
-public void call(Coprocessor observer,
+public void call(Coprocessor observer,
  ObserverContext ctx)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
index 54c84c2..eb93fd7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract static class RegionCoprocessorHost.RegionOperationWithResult
+private abstract static class RegionCoprocessorHost.RegionOperationWithResult
 extends RegionCoprocessorHost.RegionOperation
 
 
@@ -244,7 +244,7 @@ extends 
 
 result
-private T result
+private T result
 
 
 
@@ -261,7 +261,7 @@ extends 
 
 RegionOperationWithResult
-public RegionOperationWithResult()
+public RegionOperationWithResult()
 
 
 
@@ -270,7 +270,7 @@ extends 
 
 RegionOperationWithResult
-public RegionOperationWithResult(User user)
+public RegionOperationWithResult(User user)
 
 
 
@@ -289,7 +289,7 @@ extends 
 
 setResult
-public void setResult(T result)
+public void setResult(T result)
 
 
 
@@ -298,7 +298,7 @@ ext

[44/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index c00c365..7350f85 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -861,42 +861,59 @@
 
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncHBaseAdmin.clearCompactionQueues(ServerName serverName,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> queues) 
+
+
 void
 Admin.clearCompactionQueues(ServerName sn,
  http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> queues)
 Clear compacting queues on a regionserver.
 
 
-
+
 void
 HBaseAdmin.clearCompactionQueues(ServerName sn,
  http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> queues) 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.clearCompactionQueues(ServerName serverName,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> queues)
+Clear compacting queues on a region server.
+
+
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+RawAsyncHBaseAdmin.clearCompactionQueues(ServerName serverName,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> queues) 
+
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
 RawAsyncHBaseAdmin.closeRegion(HRegionInfo hri,
ServerName serverName) 
 
-
+
 void
 Admin.closeRegion(ServerName sn,
HRegionInfo hri)
 Close a region.
 
 
-
+
 void
 HBaseAdmin.closeRegion(ServerName sn,
HRegionInfo hri) 
 
-
+
 private void
 HBaseAdmin.compact(ServerName sn,
HRegionInfo hri,
boolean major,
byte[] family) 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 RawAsyncHBaseAdmin.compact(ServerName sn,
HRegionInfo hri,
@@ -905,55 +922,55 @@
 Compact the region at specific region server.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncHBaseAdmin.compactRegionServer(ServerName serverName) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture

[50/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 84e6b96..520047e 100644
--- a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":18,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":18,"i44":6,"i45":6,"i46":6,"i47":18,"i48":6,"i49":18,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":18,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":18,"i69":6,"i70":18,"i71":6,"i72":18,"i73":6,"i74":18,"i75":6,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":18,"i103":18,"i104":6,"i105":6,"i106":18,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":6,"i10":18,"i11":6,"i12":18,"i13":6,"i14":6,"i15":6,"i16":18,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":18,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":18,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":18,"i45":6,"i46":6,"i47":6,"i48":18,"i49":6,"i50":18,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":18,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":18,"i74":6,"i75":18,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":6,"i82":18,"i83":6,"i84":18,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":18,"i111":18,"i112":6,"i113":6,"i114":18,"i115":6,"i116":6,"i117":6,"i118":6,"i
 119":6,"i120":6,"i121":6,"i122":6,"i123":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -102,7 +102,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface AsyncAdmin
+public interface AsyncAdmin
 The asynchronous administrative API for HBase.
  
  This feature is still under development, so marked as IA.Private. Will change 
to public when
@@ -171,63 +171,70 @@ public interface 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+clearCompactionQueues(ServerName serverName,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> queues)
+Clear compacting queues on a region server.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 cloneSnapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
  TableName tableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
 closeRegion(byte[] regionName,
http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional serverName)
 Close a region.
 
 
-
+
 default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">Comp

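The AsyncAdmin page diffed above now lists clearCompactionQueues(ServerName, Set<String>) returning CompletableFuture<Void>. A hedged usage sketch: the AsyncAdmin and ServerName wiring is assumed to exist already, and the queue names "long" and "short" (the two compaction queues a region server keeps) should be verified against your HBase version:

  import java.util.Arrays;
  import java.util.HashSet;
  import java.util.Set;
  import org.apache.hadoop.hbase.ServerName;
  import org.apache.hadoop.hbase.client.AsyncAdmin;

  public class ClearQueuesSketch {
    static void clearQueues(AsyncAdmin admin, ServerName server) {
      Set<String> queues = new HashSet<>(Arrays.asList("long", "short"));
      // join() blocks until the RPC completes and rethrows any failure.
      admin.clearCompactionQueues(server, queues).join();
    }
  }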
[24/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import 
java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import 
java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import org.apache.commons.logging.LogFactory;
-049import org.apache.hadoop.hbase.ClusterStatus;
-050import org.apache.hadoop.hbase.HRegionInfo;
-051import org.apache.hadoop.hbase.HRegionLocation;
-052import org.apache.hadoop.hbase.MetaTableAccessor;
-053import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import org.apache.hadoop.hbase.NotServingRegionException;
-055import org.apache.hadoop.hbase.ProcedureInfo;
-056import org.apache.hadoop.hbase.RegionLoad;
-057import org.apache.hadoop.hbase.RegionLocations;
-058import org.apache.hadoop.hbase.ServerName;
-059import org.apache.hadoop.hbase.NamespaceDescriptor;
-060import org.apache.hadoop.hbase.HConstants;
-061import org.apache.hadoop.hbase.TableExistsException;
-062import org.apache.hadoop.hbase.TableName;
-063import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import org.apache.hadoop.hbase.TableNotDisabledException;
-065import org.apache.hadoop.hbase.TableNotEnabledException;
-066import org.apache.hadoop.hbase.TableNotFoundException;
-067import org.apache.hadoop.hbase.UnknownRegionException;
-068import org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import org.apache.hadoop.hbase.client.Scan.ReadType;
-072import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import org.apache.hadoop.hbase.client.replication.TableCFs;
-074import org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import org.apache.hadoop.hbase.replication.ReplicationException;
-080import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProt

[51/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/90c7dfe4
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/90c7dfe4
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/90c7dfe4

Branch: refs/heads/asf-site
Commit: 90c7dfe414a15b816ff1a44dea70b6f96a030756
Parents: 5a4910e
Author: jenkins 
Authored: Tue Jul 11 15:02:22 2017 +
Committer: jenkins 
Committed: Tue Jul 11 15:02:22 2017 +

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|4 +-
 apache_hbase_reference_guide.pdfmarks   |4 +-
 apidocs/index-all.html  |   30 +
 .../hadoop/hbase/class-use/ServerName.html  |   39 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |  624 +-
 .../security/class-use/SecurityCapability.html  |4 +
 .../apache/hadoop/hbase/client/AsyncAdmin.html  | 1949 ---
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   | 1712 +++---
 checkstyle.rss  |8 +-
 coc.html|4 +-
 cygwin.html |4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/constant-values.html |6 +-
 devapidocs/index-all.html   |   74 +-
 .../hadoop/hbase/backup/package-tree.html   |4 +-
 .../hadoop/hbase/class-use/HRegionLocation.html |   36 +-
 .../hadoop/hbase/class-use/ServerName.html  |  185 +-
 .../hadoop/hbase/class-use/TableName.html   |  332 +-
 .../hbase/classification/package-tree.html  |6 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |  624 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.html|  579 +-
 .../hbase/client/AsyncMetaRegionLocator.html|   10 +-
 .../hbase/client/AsyncNonMetaRegionLocator.html |   26 +-
 .../hadoop/hbase/client/AsyncRegionLocator.html |   39 +-
 ...dmin.AddColumnFamilyProcedureBiConsumer.html |6 +-
 .../client/RawAsyncHBaseAdmin.AdminRpcCall.html |4 +-
 .../client/RawAsyncHBaseAdmin.Converter.html|4 +-
 ...dmin.CreateNamespaceProcedureBiConsumer.html |6 +-
 ...aseAdmin.CreateTableProcedureBiConsumer.html |6 +-
 ...n.DeleteColumnFamilyProcedureBiConsumer.html |6 +-
 ...dmin.DeleteNamespaceProcedureBiConsumer.html |6 +-
 ...aseAdmin.DeleteTableProcedureBiConsumer.html |8 +-
 ...seAdmin.DisableTableProcedureBiConsumer.html |6 +-
 ...aseAdmin.EnableTableProcedureBiConsumer.html |6 +-
 .../RawAsyncHBaseAdmin.MasterRpcCall.html   |4 +-
 ...min.MergeTableRegionProcedureBiConsumer.html |6 +-
 ...n.ModifyColumnFamilyProcedureBiConsumer.html |6 +-
 ...dmin.ModifyNamespaceProcedureBiConsumer.html |6 +-
 ...HBaseAdmin.NamespaceProcedureBiConsumer.html |   14 +-
 .../RawAsyncHBaseAdmin.ProcedureBiConsumer.html |   12 +-
 .../RawAsyncHBaseAdmin.TableOperator.html   |4 +-
 ...syncHBaseAdmin.TableProcedureBiConsumer.html |   14 +-
 ...eAdmin.TruncateTableProcedureBiConsumer.html |6 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.html |  727 ++-
 .../hbase/client/RpcRetryingCallerImpl.html |   10 +-
 .../hadoop/hbase/client/class-use/Append.html   |2 +-
 .../client/class-use/MasterSwitchType.html  |   11 +-
 .../client/class-use/RegionLocateType.html  |   24 +-
 .../hadoop/hbase/client/class-use/Result.html   |   15 +-
 .../hadoop/hbase/client/package-tree.html   |   26 +-
 .../security/class-use/SecurityCapability.html  |   14 +-
 .../hadoop/hbase/executor/package-tree.html |2 +-
 .../hadoop/hbase/filter/package-tree.html   |6 +-
 .../hadoop/hbase/io/hfile/package-tree.html |4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |4 +-
 .../hadoop/hbase/mapreduce/package-tree.html|4 +-
 .../hbase/master/balancer/package-tree.html |2 +-
 .../hadoop/hbase/master/package-tree.html   |4 +-
 .../hbase/master/procedure/package-tree.html|2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |   12 +-
 .../hbase/procedure2/class-use/LockInfo.html|   23 +
 .../hadoop/hbase/procedure2/package-tree.html   |6 +-
 .../hadoop/hbase/quotas/package-tree.html   |8 +-
 ...ionCoprocessorHost.CoprocessorOperation.html |   12 +-
 ...RegionCoprocessorHost.EndpointOperation.html |   10 +-
 ...ocessorHost.EndpointOperationWithResult.html |   10 +-
 .../RegionCoprocessorHost.RegionOperation.html  |   12 +-
 ...processorHost.RegionOperationWithResult.html |   12 +-
 .../regionserver/RegionCoprocessorHost.html |   68 +-

[22/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html

[18/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html

[33/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index ec0d665..013a7c2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -29,560 +29,603 @@
 021import java.util.List;
 022import java.util.Map;
 023import java.util.Optional;
-024import java.util.concurrent.CompletableFuture;
-025import java.util.concurrent.ExecutorService;
-026import java.util.regex.Pattern;
-027
-028import org.apache.commons.logging.Log;
-029import org.apache.commons.logging.LogFactory;
-030import org.apache.hadoop.hbase.ClusterStatus;
-031import org.apache.hadoop.hbase.HRegionInfo;
-032import org.apache.hadoop.hbase.ProcedureInfo;
-033import org.apache.hadoop.hbase.RegionLoad;
-034import org.apache.hadoop.hbase.ServerName;
-035import org.apache.hadoop.hbase.NamespaceDescriptor;
-036import org.apache.hadoop.hbase.TableName;
-037import org.apache.hadoop.hbase.classification.InterfaceAudience;
-038import org.apache.hadoop.hbase.client.replication.TableCFs;
-039import org.apache.hadoop.hbase.quotas.QuotaFilter;
-040import org.apache.hadoop.hbase.quotas.QuotaSettings;
-041import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-042import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-043import org.apache.hadoop.hbase.util.Pair;
-044
-045/**
-046 * The implementation of AsyncAdmin.
-047 */
-048@InterfaceAudience.Private
-049public class AsyncHBaseAdmin implements AsyncAdmin {
-050
-051  private static final Log LOG = LogFactory.getLog(AsyncHBaseAdmin.class);
-052
-053  private final RawAsyncHBaseAdmin rawAdmin;
-054
-055  private final ExecutorService pool;
-056
-057  AsyncHBaseAdmin(RawAsyncHBaseAdmin rawAdmin, ExecutorService pool) {
-058    this.rawAdmin = rawAdmin;
-059    this.pool = pool;
-060  }
-061
-062  private <T> CompletableFuture<T> wrap(CompletableFuture<T> future) {
-063    CompletableFuture<T> asyncFuture = new CompletableFuture<>();
-064    future.whenCompleteAsync((r, e) -> {
-065      if (e != null) {
-066        asyncFuture.completeExceptionally(e);
-067      } else {
-068        asyncFuture.complete(r);
-069      }
-070    }, pool);
-071    return asyncFuture;
-072  }
-073
-074  @Override
-075  public CompletableFuture<Boolean> tableExists(TableName tableName) {
-076    return wrap(rawAdmin.tableExists(tableName));
-077  }
-078
-079  @Override
-080  public CompletableFuture<List<TableDescriptor>> listTables(Optional<Pattern> pattern,
-081      boolean includeSysTables) {
-082    return wrap(rawAdmin.listTables(pattern, includeSysTables));
-083  }
-084
-085  @Override
-086  public CompletableFuture<List<TableName>> listTableNames(Optional<Pattern> pattern,
-087      boolean includeSysTables) {
-088    return wrap(rawAdmin.listTableNames(pattern, includeSysTables));
-089  }
-090
-091  @Override
-092  public CompletableFuture<TableDescriptor> getTableDescriptor(TableName tableName) {
-093    return wrap(rawAdmin.getTableDescriptor(tableName));
-094  }
-095
-096  @Override
-097  public CompletableFuture<Void> createTable(TableDescriptor desc, byte[] startKey, byte[] endKey,
-098      int numRegions) {
-099    return wrap(rawAdmin.createTable(desc, startKey, endKey, numRegions));
-100  }
-101
-102  @Override
-103  public CompletableFuture<Void> createTable(TableDescriptor desc, Optional<byte[][]> splitKeys) {
-104    return wrap(rawAdmin.createTable(desc, splitKeys));
-105  }
-106
-107  @Override
-108  public CompletableFuture<Void> deleteTable(TableName tableName) {
-109    return wrap(rawAdmin.deleteTable(tableName));
-110  }
-111
-112  @Override
-113  public CompletableFuture<List<TableDescriptor>> deleteTables(Pattern pattern) {
-114    return wrap(rawAdmin.deleteTables(pattern));
-115  }
-116
-117  @Override
-118  public CompletableFuture<Void> truncateTable(TableName tableName, boolean preserveSplits) {
-119    return wrap(rawAdmin.truncateTable(tableName, preserveSplits));
-120  }
-121
-122  @Override
-123  public CompletableFuture<Void> enableTable(TableName tableName) {
-124    return wrap(rawAdmin.enableTable(tableName));
-125  }
-126
-127  @Override
-128  public CompletableFuture<List<TableDescriptor>> enableTables(Pattern pattern) {
-129    return wrap(rawAdmin.enableTables(pattern));
-130  }
-131
-132  @Override
-133  public CompletableFuture<Void> disableTable(TableName tableName) {
-134    return wrap(rawAdmin.disableTable(tableName));
-135  }
-136
-137  @Override
-138  public CompletableFuture<List<TableDescriptor>> disableTables(Pattern pattern) {
-139    return wrap(rawAdmin.disableTables(pattern));
[29/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
index 75db22d..99a09f9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import 
java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import 
java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import 
com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.hbase.ClusterStatus;
-050import 
org.apache.hadoop.hbase.HRegionInfo;
-051import 
org.apache.hadoop.hbase.HRegionLocation;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor;
-053import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import 
org.apache.hadoop.hbase.NotServingRegionException;
-055import 
org.apache.hadoop.hbase.ProcedureInfo;
-056import 
org.apache.hadoop.hbase.RegionLoad;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import 
org.apache.hadoop.hbase.TableNotDisabledException;
-065import 
org.apache.hadoop.hbase.TableNotEnabledException;
-066import 
org.apache.hadoop.hbase.TableNotFoundException;
-067import 
org.apache.hadoop.hbase.UnknownRegionException;
-068import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-072import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitReg

[31/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html

hbase-site git commit: INFRA-10751 Empty commit

2017-07-11 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 90c7dfe41 -> 2f4156a78


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/2f4156a7
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/2f4156a7
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/2f4156a7

Branch: refs/heads/asf-site
Commit: 2f4156a788b74922644f6db8af020369af25db48
Parents: 90c7dfe
Author: jenkins 
Authored: Tue Jul 11 15:02:58 2017 +
Committer: jenkins 
Committed: Tue Jul 11 15:02:58 2017 +

--

--




[23/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html

[43/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 021cf88..6dd2483 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -3090,24 +3090,34 @@ service.

 (package private) CompletableFuture<HRegionLocation>
-AsyncNonMetaRegionLocator.getRegionLocation(TableName tableName,
+AsyncNonMetaRegionLocator.getRegionLocation(TableName tableName,
  byte[] row,
- RegionLocateType locateType)
+ RegionLocateType locateType,
+ boolean reload)

 (package private) CompletableFuture<HRegionLocation>
-AsyncRegionLocator.getRegionLocation(TableName tableName,
+AsyncRegionLocator.getRegionLocation(TableName tableName,
  byte[] row,
  RegionLocateType type,
+ boolean reload,
  long timeoutNs)

+(package private) CompletableFuture<HRegionLocation>
+AsyncRegionLocator.getRegionLocation(TableName tableName,
+ byte[] row,
+ RegionLocateType type,
+ long timeoutNs)
+
 private CompletableFuture<HRegionLocation>
-AsyncNonMetaRegionLocator.getRegionLocationInternal(TableName tableName,
+AsyncNonMetaRegionLocator.getRegionLocationInternal(TableName tableName,
  byte[] row,
- RegionLocateType locateType)
+ RegionLocateType locateType,
+ boolean reload)

 (package private) static RegionLocations
 RpcRetryingCallerWithReadReplicas.getRegionLocations(boolean useCache,
   int replicaId,
@@ -3115,7 +3125,7 @@ service.
   TableName tableName,
   byte[] row)

 static RegionLocations
 RegionAdminServiceCallable.getRegionLocations(ClusterConnection connection,
   TableName tableName,
   byte[] row,
   boolean useCache,
   int replicaId)
@@ -3123,83 +3133,83 @@ service.

 RegionLocator
 ConnectionImplementation.getRegionLocator(TableName tableName)

 AsyncTableRegionLocator
 AsyncConnection.getRegionLocator(TableName tableName)
 Retrieve an AsyncRegionLocator implementation to inspect region information on a table.

 AsyncTableRegionLocator
 AsyncConnectionImpl.getRegionLocator(TableName tableName)

 RegionLocator
 Connection.getRegionLocator(TableName tableName)
 Retrieve a RegionLocator implementation to inspect region information on a table.

 Table
 ConnectionImplementation.getTable(TableName tableName)

 default Table
 Connection.getTable(TableName tableName)
 Retrieve a Table implementation for accessing a table.

 default AsyncTable
 AsyncConnection.getTable(TableName tableName, ExecutorService pool)
 Retrieve an AsyncTable implementation for accessing a table.

 default Table
 Connection.getTable(TableName tableName, ExecutorService pool)
 Retrieve a Table implementation for accessing a table.

 TableBuilder
 ConnectionImplementation.getTableBuilder(TableName tableName, ExecutorService pool)

 AsyncTableBuilder
 AsyncConnection.getTableBuilder(TableName tableName, ExecutorService pool)
 Returns an AsyncTableBuilder for creating AsyncTable.

 AsyncTableBuilder
 AsyncConnectionImpl.getTableBuilder(TableName tableName,
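
The rows above record an API change in the client's region locator: getRegionLocation and getRegionLocationInternal gained a boolean reload parameter, while the old arities survive as overloads. A hedged sketch of that overload shape, using simplified stand-in types rather than HBase's actual classes, and assuming reload=true means "skip the cached location and re-read meta" (its usual purpose after a stale-location error):

    import java.util.Map;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ConcurrentHashMap;

    class LocatorSketch {

      // Stand-in for HRegionLocation: region name plus hosting server.
      static final class Loc {
        final String region;
        final String server;
        Loc(String region, String server) { this.region = region; this.server = server; }
      }

      enum LocateType { BEFORE, CURRENT, AFTER }

      private final Map<String, Loc> cache = new ConcurrentHashMap<>();

      // Old signature kept for existing callers: defaults reload to false.
      CompletableFuture<Loc> getRegionLocation(String table, byte[] row,
          LocateType type, long timeoutNs) {
        return getRegionLocation(table, row, type, false, timeoutNs);
      }

      // New signature: reload=true bypasses the cache, e.g. after the caller
      // received a not-serving error from a stale location.
      CompletableFuture<Loc> getRegionLocation(String table, byte[] row,
          LocateType type, boolean reload, long timeoutNs) {
        if (!reload) {
          Loc cached = cache.get(table);
          if (cached != null) {
            return CompletableFuture.completedFuture(cached);
          }
        }
        return fetchFromMeta(table).thenApply(loc -> {
          cache.put(table, loc); // refresh with the authoritative answer
          return loc;
        });
      }

      private CompletableFuture<Loc> fetchFromMeta(String table) {
        // Placeholder for the real meta-table lookup.
        return CompletableFuture.completedFuture(new Loc(table + ",,1", "rs1.example.org:16020"));
      }
    }

Keeping the short overload delegating into the long one means existing call sites compile unchanged while retry paths can opt into a forced refresh.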

[07/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
index 25d6b70..635798d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
@@ -1234,540 +1234,541 @@
 1226   * @param result the result returned by the append
 1227   * @throws IOException if an error occurred on the coprocessor
 1228   */
-1229  public void postAppend(final Append append, final Result result) throws IOException {
-1230    execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
-1231      @Override
-1232      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-1233          throws IOException {
-1234        oserver.postAppend(ctx, append, result);
-1235      }
-1236    });
-1237  }
-1238
-1239  /**
-1240   * @param increment increment object
-1241   * @param result the result returned by postIncrement
-1242   * @throws IOException if an error occurred on the coprocessor
-1243   */
-1244  public Result postIncrement(final Increment increment, Result result) throws IOException {
-1245    return execOperationWithResult(result,
-1246        coprocessors.isEmpty() ? null : new RegionOperationWithResult<Result>() {
-1247      @Override
-1248      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-1249          throws IOException {
-1250        setResult(oserver.postIncrement(ctx, increment, getResult()));
-1251      }
-1252    });
-1253  }
-1254
-1255  /**
-1256   * @param scan the Scan specification
-1257   * @return scanner id to return to client if default operation should be
-1258   * bypassed, null otherwise
-1259   * @exception IOException Exception
-1260   */
-1261  public RegionScanner preScannerOpen(final Scan scan) throws IOException {
-1262    return execOperationWithResult(true, null,
-1263        coprocessors.isEmpty() ? null : new RegionOperationWithResult<RegionScanner>() {
-1264      @Override
-1265      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-1266          throws IOException {
-1267        setResult(oserver.preScannerOpen(ctx, scan, getResult()));
-1268      }
-1269    });
-1270  }
-1271
-1272  /**
-1273   * See
-1274   * {@link RegionObserver#preStoreScannerOpen(ObserverContext,
-1275   *    Store, Scan, NavigableSet, KeyValueScanner)}
-1276   */
-1277  public KeyValueScanner preStoreScannerOpen(final Store store, final Scan scan,
-1278      final NavigableSet<byte[]> targetCols, final long readPt) throws IOException {
-1279    return execOperationWithResult(null,
-1280        coprocessors.isEmpty() ? null : new RegionOperationWithResult<KeyValueScanner>() {
-1281      @Override
-1282      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-1283          throws IOException {
-1284        setResult(oserver.preStoreScannerOpen(ctx, store, scan, targetCols, getResult(), readPt));
-1285      }
-1286    });
-1287  }
-1288
-1289  /**
-1290   * @param scan the Scan specification
-1291   * @param s the scanner
-1292   * @return the scanner instance to use
-1293   * @exception IOException Exception
-1294   */
-1295  public RegionScanner postScannerOpen(final Scan scan, RegionScanner s) throws IOException {
-1296    return execOperationWithResult(s,
-1297        coprocessors.isEmpty() ? null : new RegionOperationWithResult<RegionScanner>() {
-1298      @Override
-1299      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-1300          throws IOException {
-1301        setResult(oserver.postScannerOpen(ctx, scan, getResult()));
-1302      }
-1303    });
-1304  }
-1305
-1306  /**
-1307   * @param s the scanner
-1308   * @param results the result set returned by the region server
-1309   * @param limit the maximum number of results to return
-1310   * @return 'has next' indication to client if bypassing default behavior, or
-1311   * null otherwise
-1312   * @exception IOException Exception
-1313   */
-1314  public Boolean preScannerNext(final InternalScanner s,
-1315      final List<Result> results, final int limit) throws IOException {
-1316    return execOperationWithResult(true, false,
-1317        coprocessors.isEmpty() ? null : new RegionOperationWithResult<Boolean>() {
-1318      @Override
-1319      public void call(RegionObserver oserver, ObserverContext
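
Every hook in the RegionCoprocessorHost excerpt follows one dispatch pattern: build a small operation object that invokes a single observer callback, pass null when no coprocessors are registered so the machinery is skipped entirely, and let execOperation iterate the loaded observers. A stripped-down sketch of that pattern (interfaces simplified; ObserverContext and bypass handling omitted):

    import java.io.IOException;
    import java.util.List;

    class HostSketch {

      // Simplified stand-in for RegionObserver with a single hook.
      interface Observer {
        void postAppend(String append, String result) throws IOException;
      }

      // One of these is built per hook invocation, like the anonymous
      // RegionOperation subclasses in the excerpt above.
      interface Operation {
        void call(Observer observer) throws IOException;
      }

      private final List<Observer> coprocessors;

      HostSketch(List<Observer> coprocessors) {
        this.coprocessors = coprocessors;
      }

      public void postAppend(String append, String result) throws IOException {
        // Passing null lets execOperation bail out immediately when no
        // coprocessors are loaded, the common case on most tables.
        execOperation(coprocessors.isEmpty() ? null
            : observer -> observer.postAppend(append, result));
      }

      private void execOperation(Operation op) throws IOException {
        if (op == null) {
          return;
        }
        for (Observer observer : coprocessors) {
          op.call(observer);
        }
      }
    }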

[02/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/export_control.html
--
diff --git a/export_control.html b/export_control.html
index b27dad9..4525bc6 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Export Control
@@ -336,7 +336,7 @@ for more details.
 https://www.apache.org/";>The Apache Software Foundation.
 All rights reserved.  
 
-  Last Published: 2017-07-10
+  Last Published: 2017-07-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/hbase-annotations/checkstyle.html
--
diff --git a/hbase-annotations/checkstyle.html b/hbase-annotations/checkstyle.html
index e8c0fc2..8ed1732 100644
--- a/hbase-annotations/checkstyle.html
+++ b/hbase-annotations/checkstyle.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-10
+Last Published: 2017-07-11
   | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/hbase-annotations/dependencies.html
--
diff --git a/hbase-annotations/dependencies.html b/hbase-annotations/dependencies.html
index 49ea759..a9ab1b7 100644
--- a/hbase-annotations/dependencies.html
+++ b/hbase-annotations/dependencies.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-10
+Last Published: 2017-07-11
   | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/hbase-annotations/dependency-convergence.html
--
diff --git a/hbase-annotations/dependency-convergence.html b/hbase-annotations/dependency-convergence.html
index 1ed0ea2..b8b6960 100644
--- a/hbase-annotations/dependency-convergence.html
+++ b/hbase-annotations/dependency-convergence.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-10
+Last Published: 2017-07-11
   | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/hbase-annotations/dependency-info.html
--
diff --git a/hbase-annotations/dependency-info.html b/hbase-annotations/dependency-info.html
index 777f7c4..7d7899b 100644
--- a/hbase-annotations/dependency-info.html
+++ b/hbase-annotations/dependency-info.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-10
+Last Published: 2017-07-11
   | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/hbase-annotations/dependency-management.html
--
diff --git a/hbase-annotations/dependency-management.html b/hbase-annotations/dependency-management.html
index d2ab3f2..5916f4e2 100644
--- a/hbase-annotations/dependency-management.html
+++ b/hbase-annotations/dependency-management.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-   

[26/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.CoprocessorOperation.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.CoprocessorOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.CoprocessorOperation.html
index 25d6b70..635798d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.CoprocessorOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.CoprocessorOperation.html
@@ -1234,540 +1234,541 @@
 1226   * @param result the result returned by the append
 1227   * @throws IOException if an error occurred on the coprocessor
 1228   */
-1229  public void postAppend(final Append append, final Result result) throws IOException {
-1230    execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
-1231      @Override
-1232      public void call(RegionObserver oserver, ObserverContext ctx)
-1233          throws IOException {
-1234        oserver.postAppend(ctx, append, result);
-1235      }
-1236    });
-1237  }
-1238
-1239  /**
-1240   * @param increment increment object
-1241   * @param result the result returned by postIncrement
-1242   * @throws IOException if an error occurred on the coprocessor
-1243   */
-1244  public Result postIncrement(final Increment increment, Result result) throws IOException {
-1245    return execOperationWithResult(result,
-1246        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1247          @Override
-1248          public void call(RegionObserver oserver, ObserverContext ctx)
-1249              throws IOException {
-1250            setResult(oserver.postIncrement(ctx, increment, getResult()));
-1251          }
-1252        });
-1253  }
-1254
-1255  /**
-1256   * @param scan the Scan specification
-1257   * @return scanner id to return to client if default operation should be
-1258   * bypassed, null otherwise
-1259   * @exception IOException Exception
-1260   */
-1261  public RegionScanner preScannerOpen(final Scan scan) throws IOException {
-1262    return execOperationWithResult(true, null,
-1263        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1264          @Override
-1265          public void call(RegionObserver oserver, ObserverContext ctx)
-1266              throws IOException {
-1267            setResult(oserver.preScannerOpen(ctx, scan, getResult()));
-1268          }
-1269        });
-1270  }
-1271
-1272  /**
-1273   * See
-1274   * {@link RegionObserver#preStoreScannerOpen(ObserverContext,
-1275   *    Store, Scan, NavigableSet, KeyValueScanner)}
-1276   */
-1277  public KeyValueScanner preStoreScannerOpen(final Store store, final Scan scan,
-1278      final NavigableSet targetCols, final long readPt) throws IOException {
-1279    return execOperationWithResult(null,
-1280        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1281          @Override
-1282          public void call(RegionObserver oserver, ObserverContext ctx)
-1283              throws IOException {
-1284            setResult(oserver.preStoreScannerOpen(ctx, store, scan, targetCols, getResult(), readPt));
-1285          }
-1286        });
-1287  }
-1288
-1289  /**
-1290   * @param scan the Scan specification
-1291   * @param s the scanner
-1292   * @return the scanner instance to use
-1293   * @exception IOException Exception
-1294   */
-1295  public RegionScanner postScannerOpen(final Scan scan, RegionScanner s) throws IOException {
-1296    return execOperationWithResult(s,
-1297        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1298          @Override
-1299          public void call(RegionObserver oserver, ObserverContext ctx)
-1300              throws IOException {
-1301            setResult(oserver.postScannerOpen(ctx, scan, getResult()));
-1302          }
-1303        });
-1304  }
-1305
-1306  /**
-1307   * @param s the scanner
-1308   * @param results the result set returned by the region server
-1309   * @param limit the maximum number of results to return
-1310   * @return 'has next' indication to client if bypassing default behavior, or
-1311   * null otherwise
-1312   * @exception IOException Exception
-1313   */
-1314  public Boolean preScannerNext(final InternalScanner s,
-1315      final List results, final int limit) throws IOException {
-1316    return execOperationWithResult(true, false,
-1317        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1318          @Override
-1319          public void call(RegionObserver oserver, ObserverContext
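
The hunk above captures the host-side dispatch idiom: every hook first checks coprocessors.isEmpty() and passes null to skip all work, and otherwise wraps the per-observer call in an anonymous RegionOperation (or RegionOperationWithResult, which threads a value through setResult/getResult). A minimal, self-contained sketch of that idiom follows; ObserverHost and Hook are hypothetical stand-ins for the RegionCoprocessorHost internals, not HBase classes.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

final class ObserverHost<O> {
  // One hook invocation; plays the role of RegionOperation#call in the hunk above.
  interface Hook<T> {
    void call(T observer) throws IOException;
  }

  private final List<O> observers = new ArrayList<>();

  void addObserver(O observer) {
    observers.add(observer);
  }

  // Mirrors execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {...}):
  // a null operation means no observers are registered, so return without allocating
  // or iterating anything.
  void execOperation(Hook<O> op) throws IOException {
    if (op == null) {
      return;
    }
    for (O observer : observers) {
      op.call(observer); // run the hook on every registered observer, in order
    }
  }
}

The empty-check-before-allocation shape is the point: the anonymous callback object is only created when at least one observer is registered, which keeps the per-operation cost near zero on tables with no coprocessors.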

[20/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import org.apache.commons.logging.LogFactory;
-049import org.apache.hadoop.hbase.ClusterStatus;
-050import org.apache.hadoop.hbase.HRegionInfo;
-051import org.apache.hadoop.hbase.HRegionLocation;
-052import org.apache.hadoop.hbase.MetaTableAccessor;
-053import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import org.apache.hadoop.hbase.NotServingRegionException;
-055import org.apache.hadoop.hbase.ProcedureInfo;
-056import org.apache.hadoop.hbase.RegionLoad;
-057import org.apache.hadoop.hbase.RegionLocations;
-058import org.apache.hadoop.hbase.ServerName;
-059import org.apache.hadoop.hbase.NamespaceDescriptor;
-060import org.apache.hadoop.hbase.HConstants;
-061import org.apache.hadoop.hbase.TableExistsException;
-062import org.apache.hadoop.hbase.TableName;
-063import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import org.apache.hadoop.hbase.TableNotDisabledException;
-065import org.apache.hadoop.hbase.TableNotEnabledException;
-066import org.apache.hadoop.hbase.TableNotFoundException;
-067import org.apache.hadoop.hbase.UnknownRegionException;
-068import org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import org.apache.hadoop.hbase.client.Scan.ReadType;
-072import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import org.apache.hadoop.hbase.client.replication.TableCFs;
-074import org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import org.apache.hadoop.hbase.replication.ReplicationException;
-080import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import org.apache.hadoop.hbase.shaded.pro

[28/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@

[49/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html b/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
index 9d705f6..e05439f 100644
--- a/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
@@ -109,6 +109,10 @@
 
 
 
+CompletableFuture<List<SecurityCapability>>
+AsyncAdmin.getSecurityCapabilities() 
+
+
 List<SecurityCapability>
 Admin.getSecurityCapabilities()
 Return the set of supported security capabilities.


[16/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@

[37/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 477054a..9ee14b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RawAsyncHBaseAdmin
+public class RawAsyncHBaseAdmin
 extends Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
@@ -373,24 +373,31 @@ implements 
 CompletableFuture<Void>
+clearCompactionQueues(ServerName serverName,
+                      Set<String> queues)
+Clear compacting queues on a region server.
+
+
+CompletableFuture<Void>
 cloneSnapshot(String snapshotName,
              TableName tableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 CompletableFuture
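
The rows above record the new clearCompactionQueues(ServerName, Set<String>) method returning CompletableFuture<Void>. A hedged usage sketch, assuming `admin` (an AsyncAdmin) and `serverName` already exist; the "long"/"short" queue names follow HBase's long/short compaction queue split and are an assumption here, not something this page states:

// Assumes java.util.Arrays, java.util.HashSet and java.util.Set are imported.
Set<String> queues = new HashSet<>(Arrays.asList("long", "short"));
admin.clearCompactionQueues(serverName, queues).whenComplete((v, error) -> {
  if (error != null) {
    error.printStackTrace(); // the clear request failed on the region server
  }
});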

[19/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@

[25/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@

[17/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
@@ -37,2710 +37,2816 @@

[08/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
index 25d6b70..635798d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
@@ -1234,540 +1234,541 @@

[21/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
@@ -37,2710 +37,2816 @@

[39/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
index 3287845..0c8d586 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
@@ -185,7 +185,7 @@ extends Object
 
 
 (package private) CompletableFuture
-getRegionLocation() 
+getRegionLocation(boolean reload) 
 
 
 (package private) void
@@ -275,13 +275,13 @@ extends Object
 
 
 Method Detail
-
+
 
 
 
 
 getRegionLocation
-CompletableFuture getRegionLocation()
+CompletableFuture getRegionLocation(boolean reload)
 
 
 
@@ -290,7 +290,7 @@ extends Object
 
 
 updateCachedLocation
-void updateCachedLocation(HRegionLocation loc,
+void updateCachedLocation(HRegionLocation loc,
                           Throwable exception)
 
 
@@ -300,7 +300,7 @@ extends Object
 
 
 clearCache
-void clearCache()
+void clearCache()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
index d3f116b..1671bb4 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
@@ -232,15 +232,17 @@ extends Object
 
 
 (package private) CompletableFuture
-getRegionLocation(TableName tableName,
+getRegionLocation(TableName tableName,
                  byte[] row,
-                 RegionLocateType locateType) 
+                 RegionLocateType locateType,
+                 boolean reload) 
 
 private CompletableFuture
-getRegionLocationInternal(TableName tableName,
+getRegionLocationInternal(TableName tableName,
                  byte[] row,
-                 RegionLocateType locateType) 
+                 RegionLocateType locateType,
+                 boolean reload) 
 
 private AsyncNonMetaRegionLocator.TableCache
@@ -517,7 +519,7 @@ extends Object
                   RegionLocateType locateType)
 
 
-
+
 
 
 
@@ -525,18 +527,20 @@ extends Object
 getRegionLocationInternal
 private CompletableFuture getRegionLocationInternal(TableName tableName,
                                                     byte[] row,
-                                                    RegionLocateType locateType)
+                                                    RegionLocateType locateType,
+                                                    boolean reload)
 
 
-
+
 
 
 
 
 getRegionLocation
-CompletableFuture getRegionLocation(TableName tableName,
+CompletableFuture
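
The boolean reload parameter threaded through both locators implies a cache-bypass retry shape. Since these locator classes are private internals rather than client-facing API, the sketch below is purely illustrative of that shape; `locator` is an assumed instance:

// First attempt trusts the cached location (reload = false); if it fails with a
// stale-location error, the retry passes true to force a fresh lookup.
locator.getRegionLocation(false).whenComplete((location, error) -> {
  if (error != null) {
    locator.getRegionLocation(true); // bypass the possibly stale cached location
  }
});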

[34/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index e303773..2748f45 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -29,985 +29,1046 @@
 021import java.util.Collection;
 022import java.util.Map;
 023import java.util.Optional;
-024import 
java.util.concurrent.CompletableFuture;
-025import java.util.regex.Pattern;
-026
-027import 
org.apache.hadoop.hbase.ClusterStatus;
-028import 
org.apache.hadoop.hbase.HRegionInfo;
-029import 
org.apache.hadoop.hbase.ProcedureInfo;
-030import 
org.apache.hadoop.hbase.RegionLoad;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-033import 
org.apache.hadoop.hbase.TableName;
-034import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-035import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-036import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-037import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-038import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-039import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-040import 
org.apache.hadoop.hbase.util.Pair;
-041
-042/**
-043 * The asynchronous administrative API for HBase.
-044 * <p>
-045 * This feature is still under development, so marked as IA.Private. Will change to public when
-046 * done. Use it with caution.
-047 */
-048@InterfaceAudience.Public
-049public interface AsyncAdmin {
-050
-051  /**
-052   * @param tableName Table to check.
-053   * @return True if table exists already. The return value will be wrapped by a
-054   *         {@link CompletableFuture}.
-055   */
-056  CompletableFuture<Boolean> tableExists(TableName tableName);
-057
-058  /**
-059   * List all the userspace tables.
-060   * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}.
-061   * @see #listTables(Optional, boolean)
-062   */
-063  default CompletableFuture<List<TableDescriptor>> listTables() {
-064    return listTables(Optional.empty(), false);
-065  }
-066
-067  /**
-068   * List all the tables matching the given pattern.
-069   * @param pattern The compiled regular expression to match against
-070   * @param includeSysTables False to match only against userspace tables
-071   * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}.
-072   */
-073  CompletableFuture<List<TableDescriptor>> listTables(Optional<Pattern> pattern,
-074      boolean includeSysTables);
-075
-076  /**
-077   * List all of the names of userspace tables.
-078   * @return a list of table names wrapped by a {@link CompletableFuture}.
-079   * @see #listTableNames(Optional, boolean)
-080   */
-081  default CompletableFuture<List<TableName>> listTableNames() {
-082    return listTableNames(Optional.empty(), false);
-083  }
-084
-085  /**
-086   * List all of the names of userspace tables.
-087   * @param pattern The regular expression to match against
-088   * @param includeSysTables False to match only against userspace tables
-089   * @return a list of table names wrapped by a {@link CompletableFuture}.
-090   */
-091  CompletableFuture<List<TableName>> listTableNames(Optional<Pattern> pattern,
-092      boolean includeSysTables);
-093
-094  /**
-095   * Method for getting the tableDescriptor
-096   * @param tableName as a {@link TableName}
-097   * @return the read-only tableDescriptor wrapped by a {@link CompletableFuture}.
-098   */
-099  CompletableFuture<TableDescriptor> getTableDescriptor(TableName tableName);
-100
-101  /**
-102   * Creates a new table.
-103   * @param desc table descriptor for table
-104   */
-105  default CompletableFuture<Void> createTable(TableDescriptor desc) {
-106    return createTable(desc, Optional.empty());
-107  }
-108
-109  /**
-110   * Creates a new table with the specified number of regions. The start key specified will become
-111   * the end key of the first region of the table, and the end key specified will become the start
-112   * key of the last region of the table (the first region has a null start key and the last region
-113   * has a null end key). BigInteger math will be used to divide the key range specified into enough
-114   * segments to make the required number of total regions.
-115   * @param desc table descriptor for table
-116   * @param startKey beginning of key range
-117   * @param endKey end of key range
-118   * @param numRegions the total number of regions to create
-119   */
-120  CompletableFuture<Void> createTable(TableDescriptor desc, byte[] startKey, byte[] endKey,
-121      int n
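
The pre-split createTable above divides the key range with BigInteger math to
produce numRegions regions. A short usage sketch, assuming an AsyncAdmin
instance "admin" and a TableDescriptor "desc" obtained elsewhere (both names
are illustrative, not part of this commit):

import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

final class CreateTableDemo {
  // Ten regions: the first region ends at "row-0000", the last begins at
  // "row-9999", and the interior boundaries come from the BigInteger split
  // described in the javadoc above.
  static void createPresplit(AsyncAdmin admin, TableDescriptor desc) {
    admin.createTable(desc, Bytes.toBytes("row-0000"), Bytes.toBytes("row-9999"), 10)
        .thenCompose(v -> admin.tableExists(desc.getTableName()))
        .thenAccept(exists -> System.out.println("table exists: " + exists))
        .join(); // blocking only to keep the demo linear
  }
}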


[12/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import 
java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import 
java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import 
com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.hbase.ClusterStatus;
-050import 
org.apache.hadoop.hbase.HRegionInfo;
-051import 
org.apache.hadoop.hbase.HRegionLocation;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor;
-053import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import 
org.apache.hadoop.hbase.NotServingRegionException;
-055import 
org.apache.hadoop.hbase.ProcedureInfo;
-056import 
org.apache.hadoop.hbase.RegionLoad;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import 
org.apache.hadoop.hbase.TableNotDisabledException;
-065import 
org.apache.hadoop.hbase.TableNotEnabledException;
-066import 
org.apache.hadoop.hbase.TableNotFoundException;
-067import 
org.apache.hadoop.hbase.UnknownRegionException;
-068import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-072import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import 
org.apache.hadoop.hbase.shad

[04/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
index 25d6b70..635798d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
@@ -1234,540 +1234,541 @@
 1226   * @param result the result returned 
by the append
 1227   * @throws IOException if an error 
occurred on the coprocessor
 1228   */
-1229  public void postAppend(final Append 
append, final Result result) throws IOException {
-1230execOperation(coprocessors.isEmpty() 
? null : new RegionOperation() {
-1231  @Override
-1232  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1233  throws IOException {
-1234oserver.postAppend(ctx, append, 
result);
-1235  }
-1236});
-1237  }
-1238
-1239  /**
-1240   * @param increment increment object
-1241   * @param result the result returned 
by postIncrement
-1242   * @throws IOException if an error 
occurred on the coprocessor
-1243   */
-1244  public Result postIncrement(final 
Increment increment, Result result) throws IOException {
-1245return 
execOperationWithResult(result,
-1246coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1247  @Override
-1248  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1249  throws IOException {
-1250
setResult(oserver.postIncrement(ctx, increment, getResult()));
-1251  }
-1252});
-1253  }
-1254
-1255  /**
-1256   * @param scan the Scan 
specification
-1257   * @return scanner id to return to 
client if default operation should be
-1258   * bypassed, null otherwise
-1259   * @exception IOException Exception
-1260   */
-1261  public RegionScanner 
preScannerOpen(final Scan scan) throws IOException {
-1262return execOperationWithResult(true, 
null,
-1263coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1264  @Override
-1265  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1266  throws IOException {
-1267
setResult(oserver.preScannerOpen(ctx, scan, getResult()));
-1268  }
-1269});
-1270  }
-1271
-1272  /**
-1273   * See
-1274   * {@link 
RegionObserver#preStoreScannerOpen(ObserverContext,
-1275   *Store, Scan, NavigableSet, 
KeyValueScanner)}
-1276   */
-1277  public KeyValueScanner 
preStoreScannerOpen(final Store store, final Scan scan,
-1278  final NavigableSet 
targetCols, final long readPt) throws IOException {
-1279return 
execOperationWithResult(null,
-1280coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1281  @Override
-1282  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1283  throws IOException {
-1284
setResult(oserver.preStoreScannerOpen(ctx, store, scan, targetCols, 
getResult(), readPt));
-1285  }
-1286});
-1287  }
-1288
-1289  /**
-1290   * @param scan the Scan 
specification
-1291   * @param s the scanner
-1292   * @return the scanner instance to 
use
-1293   * @exception IOException Exception
-1294   */
-1295  public RegionScanner 
postScannerOpen(final Scan scan, RegionScanner s) throws IOException {
-1296return execOperationWithResult(s,
-1297coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1298  @Override
-1299  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1300  throws IOException {
-1301
setResult(oserver.postScannerOpen(ctx, scan, getResult()));
-1302  }
-1303});
-1304  }
-1305
-1306  /**
-1307   * @param s the scanner
-1308   * @param results the result set 
returned by the region server
-1309   * @param limit the maximum number of 
results to return
-1310   * @return 'has next' indication to 
client if bypassing default behavior, or
-1311   * null otherwise
-1312   * @exception IOException Exception
-1313   */
-1314  public Boolean preScannerNext(final 
InternalScanner s,
-1315  final List results, 
final int limit) throws IOException {
-1316return execOperationWithResult(true, 
false,
-1317coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1318  @Override
-1319  public void call(RegionObserver 
oserver, ObserverCon
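
Every hook in the diff above follows the same dispatch shape: return early when
no coprocessor is loaded, otherwise run each registered observer's callback in
turn. A stripped-down sketch of that pattern; Observer and Host are illustrative
names, not the HBase types:

import java.io.IOException;
import java.util.List;

interface Observer {
  void postAppend(String row, String value) throws IOException;
}

final class Host {
  private final List<Observer> observers;

  Host(List<Observer> observers) {
    this.observers = observers;
  }

  // Mirrors execOperation: skip entirely when no coprocessor is registered.
  void postAppend(String row, String value) throws IOException {
    if (observers.isEmpty()) {
      return;
    }
    for (Observer o : observers) {
      o.postAppend(row, value); // each observer sees the completed append
    }
  }
}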

[27/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import 
java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import 
java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import 
com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.hbase.ClusterStatus;
-050import 
org.apache.hadoop.hbase.HRegionInfo;
-051import 
org.apache.hadoop.hbase.HRegionLocation;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor;
-053import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import 
org.apache.hadoop.hbase.NotServingRegionException;
-055import 
org.apache.hadoop.hbase.ProcedureInfo;
-056import 
org.apache.hadoop.hbase.RegionLoad;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import 
org.apache.hadoop.hbase.TableNotDisabledException;
-065import 
org.apache.hadoop.hbase.TableNotEnabledException;
-066import 
org.apache.hadoop.hbase.TableNotFoundException;
-067import 
org.apache.hadoop.hbase.UnknownRegionException;
-068import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-072import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProt

[41/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 1ebd57d..3ad3bd4 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":18,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":18,"i44":6,"i45":6,"i46":6,"i47":18,"i48":6,"i49":18,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":18,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":18,"i69":6,"i70":18,"i71":6,"i72":18,"i73":6,"i74":18,"i75":6,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":18,"i103":18,"i104":6,"i105":6,"i106":18,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":6,"i10":18,"i11":6,"i12":18,"i13":6,"i14":6,"i15":6,"i16":18,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":18,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":18,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":18,"i45":6,"i46":6,"i47":6,"i48":18,"i49":6,"i50":18,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":18,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":18,"i74":6,"i75":18,"i76":6,"i77":18,"i78":6,"i79":18,"i80":6,"i81":6,"i82":18,"i83":6,"i84":18,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":18,"i111":18,"i112":6,"i113":6,"i114":18,"i115":6,"i116":6,"i117":6,"i118":6,"i
 119":6,"i120":6,"i121":6,"i122":6,"i123":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -106,7 +106,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface AsyncAdmin
+public interface AsyncAdmin
 The asynchronous administrative API for HBase.
  
  This feature is still under development, so marked as IA.Private. Will change 
to public when
@@ -175,63 +175,70 @@ public interface 
+CompletableFuture<Void>
+clearCompactionQueues(ServerName serverName,
+                      Set<String> queues)
+Clear compacting queues on a region server.
+
+CompletableFuture<Void>
 cloneSnapshot(String snapshotName,
               TableName tableName)
 Create a new table by cloning the snapshot content.

-
+
 CompletableFuture<Boolean>
 closeRegion(byte[] regionName,
             Optional<ServerName> serverName)
 Close a region.

-
+
 default CompletableFuture<

[48/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index e303773..2748f45 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -29,985 +29,1046 @@
 021import java.util.Collection;
 022import java.util.Map;
 023import java.util.Optional;
-024import 
java.util.concurrent.CompletableFuture;
-025import java.util.regex.Pattern;
-026
-027import 
org.apache.hadoop.hbase.ClusterStatus;
-028import 
org.apache.hadoop.hbase.HRegionInfo;
-029import 
org.apache.hadoop.hbase.ProcedureInfo;
-030import 
org.apache.hadoop.hbase.RegionLoad;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-033import 
org.apache.hadoop.hbase.TableName;
-034import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-035import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-036import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-037import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-038import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-039import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-040import 
org.apache.hadoop.hbase.util.Pair;
-041
-042/**
-043 * The asynchronous administrative API for HBase.
-044 * <p>
-045 * This feature is still under development, so marked as IA.Private. Will change to public when
-046 * done. Use it with caution.
-047 */
-048@InterfaceAudience.Public
-049public interface AsyncAdmin {
-050
-051  /**
-052   * @param tableName Table to check.
-053   * @return True if table exists already. The return value will be wrapped by a
-054   *         {@link CompletableFuture}.
-055   */
-056  CompletableFuture<Boolean> tableExists(TableName tableName);
-057
-058  /**
-059   * List all the userspace tables.
-060   * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}.
-061   * @see #listTables(Optional, boolean)
-062   */
-063  default CompletableFuture<List<TableDescriptor>> listTables() {
-064    return listTables(Optional.empty(), false);
-065  }
-066
-067  /**
-068   * List all the tables matching the given pattern.
-069   * @param pattern The compiled regular expression to match against
-070   * @param includeSysTables False to match only against userspace tables
-071   * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}.
-072   */
-073  CompletableFuture<List<TableDescriptor>> listTables(Optional<Pattern> pattern,
-074      boolean includeSysTables);
-075
-076  /**
-077   * List all of the names of userspace tables.
-078   * @return a list of table names wrapped by a {@link CompletableFuture}.
-079   * @see #listTableNames(Optional, boolean)
-080   */
-081  default CompletableFuture<List<TableName>> listTableNames() {
-082    return listTableNames(Optional.empty(), false);
-083  }
-084
-085  /**
-086   * List all of the names of userspace tables.
-087   * @param pattern The regular expression to match against
-088   * @param includeSysTables False to match only against userspace tables
-089   * @return a list of table names wrapped by a {@link CompletableFuture}.
-090   */
-091  CompletableFuture<List<TableName>> listTableNames(Optional<Pattern> pattern,
-092      boolean includeSysTables);
-093
-094  /**
-095   * Method for getting the tableDescriptor
-096   * @param tableName as a {@link TableName}
-097   * @return the read-only tableDescriptor wrapped by a {@link CompletableFuture}.
-098   */
-099  CompletableFuture<TableDescriptor> getTableDescriptor(TableName tableName);
-100
-101  /**
-102   * Creates a new table.
-103   * @param desc table descriptor for table
-104   */
-105  default CompletableFuture<Void> createTable(TableDescriptor desc) {
-106    return createTable(desc, Optional.empty());
-107  }
-108
-109  /**
-110   * Creates a new table with the specified number of regions. The start key specified will become
-111   * the end key of the first region of the table, and the end key specified will become the start
-112   * key of the last region of the table (the first region has a null start key and the last region
-113   * has a null end key). BigInteger math will be used to divide the key range specified into enough
-114   * segments to make the required number of total regions.
-115   * @param desc table descriptor for table
-116   * @param startKey beginning of key range
-117   * @param endKey end of key range
-118   * @param numRegions the total number of regions to create
-119   */
-120  CompletableFuture<Void> createTable(TableDescriptor desc, byte[] startKey, byte[] endKey,
-121      int numRegions);
-12


[30/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
index 75db22d..99a09f9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import 
java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import 
java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import 
com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.hbase.ClusterStatus;
-050import 
org.apache.hadoop.hbase.HRegionInfo;
-051import 
org.apache.hadoop.hbase.HRegionLocation;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor;
-053import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import 
org.apache.hadoop.hbase.NotServingRegionException;
-055import 
org.apache.hadoop.hbase.ProcedureInfo;
-056import 
org.apache.hadoop.hbase.RegionLoad;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import 
org.apache.hadoop.hbase.TableNotDisabledException;
-065import 
org.apache.hadoop.hbase.TableNotEnabledException;
-066import 
org.apache.hadoop.hbase.TableNotFoundException;
-067import 
org.apache.hadoop.hbase.UnknownRegionException;
-068import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-072import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Admin

[45/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 54a5ff5..2042c0f 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 ©2007 - 2017 The Apache Software Foundation
 
   File: 2244,
- Errors: 14778,
+ Errors: 14786,
  Warnings: 0,
  Infos: 0
   
@@ -5627,7 +5627,7 @@ under the License.
   0
 
 
-  186
+  190
 
   
   
@@ -8987,7 +8987,7 @@ under the License.
   0
 
 
-  116
+  117
 
   
   
@@ -19935,7 +19935,7 @@ under the License.
   0
 
 
-  16
+  19
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/coc.html
--
diff --git a/coc.html b/coc.html
index db663e4..dc9ae0b 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org";>the priv
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-10
+  Last Published: 
2017-07-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 174b518..76cfc4a 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-10
+  Last Published: 
2017-07-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 48c5ab7..948267a 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -527,7 +527,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-10
+  Last Published: 
2017-07-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 2518b64..5bb3fa1 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Reactor Dependency Convergence
 
@@ -663,7 +663,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-10
+  Last Published: 
2017-07-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 12d831c..39a1be1 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Dependency Information
 
@@ -318,7 +318,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-10
+  Last Published: 
2017-07-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index bb80df9..d45f72a 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 

[11/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
index 39254b4..2ceb2fb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
@@ -117,132 +117,136 @@
 109  } catch (Throwable t) {
 110Throwable e = t.getCause();
 111
ExceptionUtil.rethrowIfInterrupt(t);
-112
-113// translateException throws 
exception when should not retry: i.e. when request is bad.
-114
interceptor.handleFailure(context, t);
-115t = translateException(t);
-116
-117if (tries > startLogErrorsCnt) 
{
-118  LOG.info("Call exception, 
tries=" + tries + ", maxAttempts=" + maxAttempts + ", started="
-119  + 
(EnvironmentEdgeManager.currentTime() - tracker.getStartTime()) + " ms ago, "
-120  + "cancelled=" + 
cancelled.get() + ", msg="
-121  + t.getMessage() + " " + 
callable.getExceptionMessageAdditionalDetail());
-122}
-123
-124callable.throwable(t, maxAttempts 
!= 1);
-125
RetriesExhaustedException.ThrowableWithExtraContext qt =
-126new 
RetriesExhaustedException.ThrowableWithExtraContext(t,
-127
EnvironmentEdgeManager.currentTime(), toString());
-128exceptions.add(qt);
-129if (tries >= maxAttempts - 1) 
{
-130  throw new 
RetriesExhaustedException(tries, exceptions);
-131}
-132// If the server is dead, we need 
to wait a little before retrying, to give
-133// a chance to the regions to be 
moved
-134// get right pause time, start by 
RETRY_BACKOFF[0] * pauseBase, where pauseBase might be
-135// special when encountering 
CallQueueTooBigException, see #HBASE-17114
-136long pauseBase = (t instanceof 
CallQueueTooBigException) ? pauseForCQTBE : pause;
-137expectedSleep = 
callable.sleep(pauseBase, tries);
-138
-139// If, after the planned sleep, 
there won't be enough time left, we stop now.
-140long duration = 
singleCallDuration(expectedSleep);
-141if (duration > callTimeout) 
{
-142  String msg = "callTimeout=" + 
callTimeout + ", callDuration=" + duration +
-143  ": " +  t.getMessage() + " 
" + callable.getExceptionMessageAdditionalDetail();
-144  throw 
(SocketTimeoutException)(new SocketTimeoutException(msg).initCause(t));
-145}
-146  } finally {
-147
interceptor.updateFailureInfo(context);
-148  }
-149  try {
-150if (expectedSleep > 0) {
-151  synchronized (cancelled) {
-152if (cancelled.get()) return 
null;
-153
cancelled.wait(expectedSleep);
-154  }
-155}
-156if (cancelled.get()) return 
null;
-157  } catch (InterruptedException e) 
{
-158throw new 
InterruptedIOException("Interrupted after " + tries
-159+ " tries while maxAttempts=" 
+ maxAttempts);
-160  }
-161}
-162  }
-163
-164  /**
-165   * @return Calculate how long a single 
call took
-166   */
-167  private long singleCallDuration(final 
long expectedSleep) {
-168return 
(EnvironmentEdgeManager.currentTime() - tracker.getStartTime()) + 
expectedSleep;
-169  }
-170
-171  @Override
-172  public T 
callWithoutRetries(RetryingCallable callable, int callTimeout)
-173  throws IOException, RuntimeException 
{
-174// The code of this method should be 
shared with withRetries.
-175try {
-176  callable.prepare(false);
-177  return 
callable.call(callTimeout);
-178} catch (Throwable t) {
-179  Throwable t2 = 
translateException(t);
-180  
ExceptionUtil.rethrowIfInterrupt(t2);
-181  // It would be nice to clear the 
location cache here.
-182  if (t2 instanceof IOException) {
-183throw (IOException)t2;
-184  } else {
-185throw new RuntimeException(t2);
-186  }
-187}
-188  }
-189
-190  /**
-191   * Get the good or the remote exception 
if any, throws the DoNotRetryIOException.
-192   * @param t the throwable to analyze
-193   * @return the translated exception, if 
it's not a DoNotRetryIOException
-194   * @throws DoNotRetryIOException - if 
we find it, we throw it instead of translating.
-195   */
-196  static Throwable 
translateException(Throwable t) throws DoNotRetryIOException {
-197if (t instanceof 
UndeclaredThrowableException) {
-198  if (t.getCause() != null) {
-199t = t.getCause();
-200  }
-201}
-202if (t instanceof RemoteException) {
-203  t = 
((RemoteException)t).unwrapRemoteException();
-204}
-205if (t instanceof LinkageEr
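
The loop above applies a backoff multiplier per attempt and swaps in a larger
base pause when the server rejects with CallQueueTooBigException (HBASE-17114).
A condensed sketch of that policy; the class, the backoff table, and the
exception check are illustrative, not the HBase implementation:

import java.util.concurrent.Callable;

final class RetryingCaller<T> {
  // Multipliers applied to the base pause, attempt by attempt.
  private static final int[] RETRY_BACKOFF = {1, 2, 3, 5, 10, 20, 40, 100};
  private final long pause;         // normal base pause, in ms
  private final long pauseForCQTBE; // base pause when the call queue is full, in ms
  private final int maxAttempts;

  RetryingCaller(long pause, long pauseForCQTBE, int maxAttempts) {
    this.pause = pause;
    this.pauseForCQTBE = pauseForCQTBE;
    this.maxAttempts = maxAttempts;
  }

  T call(Callable<T> callable) throws Exception {
    for (int tries = 0; ; tries++) {
      try {
        return callable.call();
      } catch (Exception t) {
        if (tries >= maxAttempts - 1) {
          throw t; // retries exhausted, surface the last failure
        }
        boolean queueFull =
            "CallQueueTooBigException".equals(t.getClass().getSimpleName());
        long pauseBase = queueFull ? pauseForCQTBE : pause;
        int idx = Math.min(tries, RETRY_BACKOFF.length - 1);
        Thread.sleep(pauseBase * RETRY_BACKOFF[idx]); // back off before retrying
      }
    }
  }
}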

[14/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import 
java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import 
java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import 
com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.hbase.ClusterStatus;
-050import 
org.apache.hadoop.hbase.HRegionInfo;
-051import 
org.apache.hadoop.hbase.HRegionLocation;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor;
-053import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import 
org.apache.hadoop.hbase.NotServingRegionException;
-055import 
org.apache.hadoop.hbase.ProcedureInfo;
-056import 
org.apache.hadoop.hbase.RegionLoad;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import 
org.apache.hadoop.hbase.TableNotDisabledException;
-065import 
org.apache.hadoop.hbase.TableNotEnabledException;
-066import 
org.apache.hadoop.hbase.TableNotFoundException;
-067import 
org.apache.hadoop.hbase.UnknownRegionException;
-068import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-072import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-099imp

[03/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
index 25d6b70..635798d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
@@ -1234,540 +1234,541 @@
 1226   * @param result the result returned 
by the append
 1227   * @throws IOException if an error 
occurred on the coprocessor
 1228   */
-1229  public void postAppend(final Append 
append, final Result result) throws IOException {
-1230execOperation(coprocessors.isEmpty() 
? null : new RegionOperation() {
-1231  @Override
-1232  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1233  throws IOException {
-1234oserver.postAppend(ctx, append, 
result);
-1235  }
-1236});
-1237  }
-1238
-1239  /**
-1240   * @param increment increment object
-1241   * @param result the result returned 
by postIncrement
-1242   * @throws IOException if an error 
occurred on the coprocessor
-1243   */
-1244  public Result postIncrement(final 
Increment increment, Result result) throws IOException {
-1245return 
execOperationWithResult(result,
-1246coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1247  @Override
-1248  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1249  throws IOException {
-1250
setResult(oserver.postIncrement(ctx, increment, getResult()));
-1251  }
-1252});
-1253  }
-1254
-1255  /**
-1256   * @param scan the Scan 
specification
-1257   * @return scanner id to return to 
client if default operation should be
-1258   * bypassed, null otherwise
-1259   * @exception IOException Exception
-1260   */
-1261  public RegionScanner 
preScannerOpen(final Scan scan) throws IOException {
-1262return execOperationWithResult(true, 
null,
-1263coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1264  @Override
-1265  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1266  throws IOException {
-1267
setResult(oserver.preScannerOpen(ctx, scan, getResult()));
-1268  }
-1269});
-1270  }
-1271
-1272  /**
-1273   * See
-1274   * {@link 
RegionObserver#preStoreScannerOpen(ObserverContext,
-1275   *Store, Scan, NavigableSet, 
KeyValueScanner)}
-1276   */
-1277  public KeyValueScanner 
preStoreScannerOpen(final Store store, final Scan scan,
-1278  final NavigableSet 
targetCols, final long readPt) throws IOException {
-1279return 
execOperationWithResult(null,
-1280coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1281  @Override
-1282  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1283  throws IOException {
-1284
setResult(oserver.preStoreScannerOpen(ctx, store, scan, targetCols, 
getResult(), readPt));
-1285  }
-1286});
-1287  }
-1288
-1289  /**
-1290   * @param scan the Scan 
specification
-1291   * @param s the scanner
-1292   * @return the scanner instance to 
use
-1293   * @exception IOException Exception
-1294   */
-1295  public RegionScanner 
postScannerOpen(final Scan scan, RegionScanner s) throws IOException {
-1296return execOperationWithResult(s,
-1297coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1298  @Override
-1299  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1300  throws IOException {
-1301
setResult(oserver.postScannerOpen(ctx, scan, getResult()));
-1302  }
-1303});
-1304  }
-1305
-1306  /**
-1307   * @param s the scanner
-1308   * @param results the result set 
returned by the region server
-1309   * @param limit the maximum number of 
results to return
-1310   * @return 'has next' indication to 
client if bypassing default behavior, or
-1311   * null otherwise
-1312   * @exception IOException Exception
-1313   */
-1314  public Boolean preScannerNext(final 
InternalScanner s,
-1315  final List results, 
final int limit) throws IOException {
-1316return execOperationWithResult(true, 
false,
-1317coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1318  @Override
-1319  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1320  throws IOException {
-1321
setResult(oserver.preScannerNext(ctx, s,

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
index 25d6b70..635798d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
@@ -1234,540 +1234,541 @@
 1226   * @param result the result returned 
by the append
 1227   * @throws IOException if an error 
occurred on the coprocessor
 1228   */
-1229  public void postAppend(final Append 
append, final Result result) throws IOException {
-1230execOperation(coprocessors.isEmpty() 
? null : new RegionOperation() {
-1231  @Override
-1232  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1233  throws IOException {
-1234oserver.postAppend(ctx, append, 
result);
-1235  }
-1236});
-1237  }
-1238
-1239  /**
-1240   * @param increment increment object
-1241   * @param result the result returned 
by postIncrement
-1242   * @throws IOException if an error 
occurred on the coprocessor
-1243   */
-1244  public Result postIncrement(final 
Increment increment, Result result) throws IOException {
-1245return 
execOperationWithResult(result,
-1246coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1247  @Override
-1248  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1249  throws IOException {
-1250
setResult(oserver.postIncrement(ctx, increment, getResult()));
-1251  }
-1252});
-1253  }
-1254
-1255  /**
-1256   * @param scan the Scan 
specification
-1257   * @return scanner id to return to 
client if default operation should be
-1258   * bypassed, null otherwise
-1259   * @exception IOException Exception
-1260   */
-1261  public RegionScanner 
preScannerOpen(final Scan scan) throws IOException {
-1262return execOperationWithResult(true, 
null,
-1263coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1264  @Override
-1265  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1266  throws IOException {
-1267
setResult(oserver.preScannerOpen(ctx, scan, getResult()));
-1268  }
-1269});
-1270  }
-1271
-1272  /**
-1273   * See
-1274   * {@link 
RegionObserver#preStoreScannerOpen(ObserverContext,
-1275   *Store, Scan, NavigableSet, 
KeyValueScanner)}
-1276   */
-1277  public KeyValueScanner 
preStoreScannerOpen(final Store store, final Scan scan,
-1278  final NavigableSet 
targetCols, final long readPt) throws IOException {
-1279return 
execOperationWithResult(null,
-1280coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1281  @Override
-1282  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1283  throws IOException {
-1284
setResult(oserver.preStoreScannerOpen(ctx, store, scan, targetCols, 
getResult(), readPt));
-1285  }
-1286});
-1287  }
-1288
-1289  /**
-1290   * @param scan the Scan 
specification
-1291   * @param s the scanner
-1292   * @return the scanner instance to 
use
-1293   * @exception IOException Exception
-1294   */
-1295  public RegionScanner 
postScannerOpen(final Scan scan, RegionScanner s) throws IOException {
-1296return execOperationWithResult(s,
-1297coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1298  @Override
-1299  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1300  throws IOException {
-1301
setResult(oserver.postScannerOpen(ctx, scan, getResult()));
-1302  }
-1303});
-1304  }
-1305
-1306  /**
-1307   * @param s the scanner
-1308   * @param results the result set 
returned by the region server
-1309   * @param limit the maximum number of 
results to return
-1310   * @return 'has next' indication to 
client if bypassing default behavior, or
-1311   * null otherwise
-1312   * @exception IOException Exception
-1313   */
-1314  public Boolean preScannerNext(final 
InternalScanner s,
-1315  final List results, 
final int limit) throws IOException {
-1316return execOperationWithResult(true, 
false,
-1317coprocessors.isEmpty() ? null : 
new RegionOperationWithResult() {
-1318  @Override
-1319  public void call(RegionObserver 
oserver, ObserverContext ctx)
-1320 

[01/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 5a4910e7b -> 90c7dfe41


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/hbase-archetypes/hbase-client-project/source-repository.html
--
diff --git a/hbase-archetypes/hbase-client-project/source-repository.html 
b/hbase-archetypes/hbase-client-project/source-repository.html
index 68b0a44..7ff8082 100644
--- a/hbase-archetypes/hbase-client-project/source-repository.html
+++ b/hbase-archetypes/hbase-client-project/source-repository.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-10
+Last Published: 2017-07-11
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-client archetype



[38/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
index 7807639..42de821 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract class RawAsyncHBaseAdmin.ProcedureBiConsumer
+private abstract class RawAsyncHBaseAdmin.ProcedureBiConsumer
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/function/BiConsumer.html?is-external=true";
 title="class or interface in java.util.function">BiConsumerVoid,http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable>
 
@@ -226,7 +226,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 admin
-protected final AsyncAdmin admin
+protected final AsyncAdmin admin
 
 
 
@@ -243,7 +243,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 ProcedureBiConsumer
-ProcedureBiConsumer(AsyncAdmin admin)
+ProcedureBiConsumer(AsyncAdmin admin)
 
 
 
@@ -260,7 +260,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 onFinished
-abstract void onFinished()
+abstract void onFinished()
 
 
 
@@ -269,7 +269,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 onError
-abstract void onError(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable error)
+abstract void onError(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable error)
 
 
 
@@ -278,7 +278,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 accept
-public void accept(http://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true";
 title="class or interface in java.lang">Void v,
+public void accept(http://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true";
 title="class or interface in java.lang">Void v,
http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable error)
 
 Specified by:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
index 816e6c1..18b1bf8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true";
 title="class or interface in java.lang">@FunctionalInterface
-private static interface RawAsyncHBaseAdmin.TableOperator
+private static interface RawAsyncHBaseAdmin.TableOperator
 
 
 
@@ -154,7 +154,7 @@ private static interface 
 
 operate
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid> operate(TableName table)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid> operate(TableName table)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/clien
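
The diff above shows the shape of RawAsyncHBaseAdmin.ProcedureBiConsumer: an abstract BiConsumer<Void, Throwable> with abstract onFinished()/onError(Throwable) hooks and a concrete accept(Void, Throwable). The body of accept is not visible here; presumably it routes on whether the error argument is null. A stand-alone sketch under that assumption (all names besides the BiConsumer shape are illustrative):

    import java.util.concurrent.CompletableFuture;
    import java.util.function.BiConsumer;

    public class ProcedureBiConsumerSketch {

      abstract static class ProcedureConsumer implements BiConsumer<Void, Throwable> {
        abstract void onFinished();
        abstract void onError(Throwable error);

        @Override
        public void accept(Void v, Throwable error) {
          // Assumed dispatch: a null error means the procedure completed normally.
          if (error != null) {
            onError(error);
          } else {
            onFinished();
          }
        }
      }

      public static void main(String[] args) {
        CompletableFuture<Void> procedure = CompletableFuture.completedFuture(null);
        procedure.whenComplete(new ProcedureConsumer() {
          @Override void onFinished() { System.out.println("procedure done"); }
          @Override void onError(Throwable t) { System.out.println("failed: " + t); }
        });
      }
    }

This also fits the TableOperator interface shown above: each table operation yields a CompletableFuture<Void>, and a ProcedureBiConsumer-style consumer is a natural whenComplete callback for it.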

[36/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
index 4339133..04b1a60 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html
@@ -463,7 +463,7 @@ implements 
 
 singleCallDuration
-private long singleCallDuration(long expectedSleep)
+private long singleCallDuration(long expectedSleep)
 
 Returns:
 Calculate how long a single call took
@@ -476,7 +476,7 @@ implements 
 
 callWithoutRetries
-public T callWithoutRetries(RetryingCallable callable,
+public T callWithoutRetries(RetryingCallable callable,
 int callTimeout)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException,
 http://docs.oracle.com/javase/8/docs/api/java/lang/RuntimeException.html?is-external=true";
 title="class or interface in java.lang">RuntimeException
@@ -502,7 +502,7 @@ implements 
 
 translateException
-static http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable translateException(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t)
+static http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable translateException(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t)
  throws DoNotRetryIOException
 Get the good or the remote exception if any, throws the 
DoNotRetryIOException.
 
@@ -521,7 +521,7 @@ implements 
 
 getTimeout
-private int getTimeout(int callTimeout)
+private int getTimeout(int callTimeout)
 
 
 
@@ -530,7 +530,7 @@ implements 
 
 toString
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String toString()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String toString()
 
 Overrides:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--";
 title="class or interface in java.lang">toString in 
class http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/class-use/Append.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Append.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Append.html
index cb6e053..a8a3b56 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Append.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Append.html
@@ -317,7 +317,7 @@ service.
   long nonce) 
 
 
-void
+Result
 RegionCoprocessorHost.postAppend(Append append,
   Result result) 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
index ebe6b58..db2716a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
@@ -159,6 +159,10 @@ the order they are declared.
 HBaseAdmin.isSplitOrMergeEnabled(MasterSwitchType switchType) 
 
 
+private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
+RawAsyncHBaseAdmin.isSplitOrMergeOn(MasterSwitchType switchType) 
+
+
 boolean[]
 Admin.setSplitOrMergeEnabled(boolean enabled,
   boolean synchronous,
@@ -166,12 +170,17 @@ the order they are declared.
 Turn the Split or Merge switches on or off.
 
 
-
+
 boolean[]
 HBaseAdmin.setSplitOrMergeEnabled(boolean enabled,
   boolean synchronous,
   MasterSwitchType... switchTypes) 
 
+
+private 
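
One substantive signature change is recorded in the class-use table above: RegionCoprocessorHost.postAppend(Append, Result) now returns Result rather than void. A plausible observer-side reading, sketched with hypothetical stand-in types: the hook receives the current Result and returns the one the client should see, so an observer can replace the result instead of only observing it.

    import java.io.IOException;

    public class PostAppendObserverSketch {

      // Stand-ins for the real Append/Result/RegionObserver types.
      static class Result {
        final String value;
        Result(String value) { this.value = value; }
      }

      interface RegionObserverLike {
        Result postAppend(String append, Result result) throws IOException;
      }

      // An observer that tags every append result before it reaches the client.
      static class AuditingObserver implements RegionObserverLike {
        @Override
        public Result postAppend(String append, Result result) throws IOException {
          return new Result(result.value + " (audited)");
        }
      }

      public static void main(String[] args) throws IOException {
        Result served = new AuditingObserver().postAppend("append-op", new Result("raw"));
        System.out.println(served.value); // raw (audited)
      }
    }

Returning the result rather than mutating it in place also composes naturally when several observers run in a chain.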

[32/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
index 4262b8f..0a3a353 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
@@ -53,81 +53,83 @@
 045this.registry = registry;
 046  }
 047
-048  CompletableFuture getRegionLocation() {
+048  CompletableFuture getRegionLocation(boolean reload) {
 049for (;;) {
-050  HRegionLocation metaRegionLocation = this.metaRegionLocation.get();
-051  if (metaRegionLocation != null) {
-052return CompletableFuture.completedFuture(metaRegionLocation);
-053  }
-054  if (LOG.isTraceEnabled()) {
-055LOG.trace("Meta region location cache is null, try fetching from registry.");
-056  }
-057  if (metaRelocateFuture.compareAndSet(null, new CompletableFuture<>())) {
-058if (LOG.isDebugEnabled()) {
-059  LOG.debug("Start fetching meta region location from registry.");
-060}
-061CompletableFuture future = metaRelocateFuture.get();
-062registry.getMetaRegionLocation().whenComplete((locs, error) -> {
-063  if (error != null) {
-064if (LOG.isDebugEnabled()) {
-065  LOG.debug("Failed to fetch meta region location from registry", error);
-066}
-067metaRelocateFuture.getAndSet(null).completeExceptionally(error);
-068return;
-069  }
-070  HRegionLocation loc = locs.getDefaultRegionLocation();
-071  if (LOG.isDebugEnabled()) {
-072LOG.debug("The fetched meta region location is " + loc);
-073  }
-074  // Here we update cache before reset future, so it is possible that someone can get a
-075  // stale value. Consider this:
-076  // 1. update cache
-077  // 2. someone clear the cache and relocate again
-078  // 3. the metaRelocateFuture is not null so the old future is used.
-079  // 4. we clear metaRelocateFuture and complete the future in it with the value being
-080  // cleared in step 2.
-081  // But we do not think it is a big deal as it rarely happens, and even if it happens, the
-082  // caller will retry again later, no correctness problems.
-083  this.metaRegionLocation.set(loc);
-084  metaRelocateFuture.set(null);
-085  future.complete(loc);
-086});
-087  } else {
-088CompletableFuture future = metaRelocateFuture.get();
-089if (future != null) {
-090  return future;
-091}
-092  }
-093}
-094  }
-095
-096  void updateCachedLocation(HRegionLocation loc, Throwable exception) {
-097AsyncRegionLocator.updateCachedLocation(loc, exception, l -> metaRegionLocation.get(),
-098  newLoc -> {
-099for (;;) {
-100  HRegionLocation oldLoc = metaRegionLocation.get();
-101  if (oldLoc != null && (oldLoc.getSeqNum() > newLoc.getSeqNum() ||
-102  oldLoc.getServerName().equals(newLoc.getServerName()))) {
-103return;
-104  }
-105  if (metaRegionLocation.compareAndSet(oldLoc, newLoc)) {
-106return;
-107  }
-108}
-109  }, l -> {
-110for (;;) {
-111  HRegionLocation oldLoc = metaRegionLocation.get();
-112  if (!canUpdate(l, oldLoc) || metaRegionLocation.compareAndSet(oldLoc, null)) {
-113return;
-114  }
-115}
-116  });
-117  }
-118
-119  void clearCache() {
-120metaRegionLocation.set(null);
-121  }
-122}
+050  if (!reload) {
+051HRegionLocation metaRegionLocation = this.metaRegionLocation.get();
+052if (metaRegionLocation != null) {
+053  return CompletableFuture.completedFuture(metaRegionLocation);
+054}
+055  }
+056  if (LOG.isTraceEnabled()) {
+057LOG.trace("Meta region location cache is null, try fetching from registry.");
+058  }
+059  if (metaRelocateFuture.compareAndSet(null, new CompletableFuture<>())) {
+060if (LOG.isDebugEnabled()) {
+061  LOG.debug("Start fetching meta region location from registry.");
+062}
+063CompletableFuture future = metaRelocateFuture.get();
+064registry.getMetaRegionLocation().whenComplete((locs, error) -> {
+065  if (error != null) {
+066if (LOG.isDebugEnabled()) {
+067  LOG.debug("Failed to fetch meta region location from registry", error);
+068}
+0
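
The rewritten getRegionLocation above pairs a cached location in an AtomicReference with a CAS-guarded CompletableFuture, so concurrent callers share one registry fetch, and the new reload flag simply skips the cache read. A compact, self-contained sketch of the pattern; String stands in for HRegionLocation and the registry call is simulated:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.atomic.AtomicReference;

    public class SingleFlightCacheSketch {

      private final AtomicReference<String> cache = new AtomicReference<>();
      private final AtomicReference<CompletableFuture<String>> relocateFuture =
          new AtomicReference<>();

      // Simulated lookup; the real code calls registry.getMetaRegionLocation().
      private CompletableFuture<String> fetchFromRegistry() {
        return CompletableFuture.supplyAsync(() -> "server-1,16020,1499760000000");
      }

      CompletableFuture<String> getLocation(boolean reload) {
        for (;;) {
          if (!reload) {
            String loc = cache.get();
            if (loc != null) {
              return CompletableFuture.completedFuture(loc); // fast path: cache hit
            }
          }
          CompletableFuture<String> future = new CompletableFuture<>();
          if (relocateFuture.compareAndSet(null, future)) {
            // We won the race: do the fetch and complete the shared future.
            fetchFromRegistry().whenComplete((loc, error) -> {
              if (error != null) {
                relocateFuture.getAndSet(null).completeExceptionally(error);
                return;
              }
              // Update the cache before clearing the guard; as the original
              // comment notes, the worst case is a stale read costing one retry.
              cache.set(loc);
              relocateFuture.set(null);
              future.complete(loc);
            });
            return future;
          }
          CompletableFuture<String> inFlight = relocateFuture.get();
          if (inFlight != null) {
            return inFlight; // someone else is fetching; share their future
          }
          // Guard cleared between our CAS and the read; loop and retry.
        }
      }
    }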

[09/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperation.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperation.html
index 25d6b70..635798d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperation.html
@@ -1234,540 +1234,541 @@
 1226   * @param result the result returned by the append
 1227   * @throws IOException if an error occurred on the coprocessor
 1228   */
-1229  public void postAppend(final Append append, final Result result) throws IOException {
-1230execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
-1231  @Override
-1232  public void call(RegionObserver oserver, ObserverContext ctx)
-1233  throws IOException {
-1234oserver.postAppend(ctx, append, result);
-1235  }
-1236});
-1237  }
-1238
-1239  /**
-1240   * @param increment increment object
-1241   * @param result the result returned by postIncrement
-1242   * @throws IOException if an error occurred on the coprocessor
-1243   */
-1244  public Result postIncrement(final Increment increment, Result result) throws IOException {
-1245return execOperationWithResult(result,
-1246coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1247  @Override
-1248  public void call(RegionObserver oserver, ObserverContext ctx)
-1249  throws IOException {
-1250setResult(oserver.postIncrement(ctx, increment, getResult()));
-1251  }
-1252});
-1253  }
-1254
-1255  /**
-1256   * @param scan the Scan specification
-1257   * @return scanner id to return to client if default operation should be
-1258   * bypassed, null otherwise
-1259   * @exception IOException Exception
-1260   */
-1261  public RegionScanner preScannerOpen(final Scan scan) throws IOException {
-1262return execOperationWithResult(true, null,
-1263coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1264  @Override
-1265  public void call(RegionObserver oserver, ObserverContext ctx)
-1266  throws IOException {
-1267setResult(oserver.preScannerOpen(ctx, scan, getResult()));
-1268  }
-1269});
-1270  }
-1271
-1272  /**
-1273   * See
-1274   * {@link RegionObserver#preStoreScannerOpen(ObserverContext,
-1275   *Store, Scan, NavigableSet, KeyValueScanner)}
-1276   */
-1277  public KeyValueScanner preStoreScannerOpen(final Store store, final Scan scan,
-1278  final NavigableSet targetCols, final long readPt) throws IOException {
-1279return execOperationWithResult(null,
-1280coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1281  @Override
-1282  public void call(RegionObserver oserver, ObserverContext ctx)
-1283  throws IOException {
-1284setResult(oserver.preStoreScannerOpen(ctx, store, scan, targetCols, getResult(), readPt));
-1285  }
-1286});
-1287  }
-1288
-1289  /**
-1290   * @param scan the Scan specification
-1291   * @param s the scanner
-1292   * @return the scanner instance to use
-1293   * @exception IOException Exception
-1294   */
-1295  public RegionScanner postScannerOpen(final Scan scan, RegionScanner s) throws IOException {
-1296return execOperationWithResult(s,
-1297coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1298  @Override
-1299  public void call(RegionObserver oserver, ObserverContext ctx)
-1300  throws IOException {
-1301setResult(oserver.postScannerOpen(ctx, scan, getResult()));
-1302  }
-1303});
-1304  }
-1305
-1306  /**
-1307   * @param s the scanner
-1308   * @param results the result set returned by the region server
-1309   * @param limit the maximum number of results to return
-1310   * @return 'has next' indication to client if bypassing default behavior, or
-1311   * null otherwise
-1312   * @exception IOException Exception
-1313   */
-1314  public Boolean preScannerNext(final InternalScanner s,
-1315  final List results, final int limit) throws IOException {
-1316return execOperationWithResult(true, false,
-1317coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1318  @Override
-1319  public void call(RegionObserver oserver, ObserverContext ctx)
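
The removed block above repeats one idiom for every hook: seed an operation object with an initial value, let each registered coprocessor's call() overwrite it through setResult(...)/getResult(), and return whatever is left. A minimal generic sketch of that plumbing, with stand-in types in place of RegionObserver and RegionOperationWithResult:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    public class OperationWithResultSketch {

      // Stand-in for RegionObserver.
      interface Observer {
        String postIncrement(String increment, String current) throws IOException;
      }

      // Stand-in for RegionOperationWithResult: carries the threaded result.
      abstract static class OperationWithResult<T> {
        private T result;
        void setResult(T result) { this.result = result; }
        T getResult() { return result; }
        abstract void call(Observer observer) throws IOException;
      }

      static <T> T execOperationWithResult(T initial, List<Observer> observers,
          OperationWithResult<T> op) throws IOException {
        op.setResult(initial);
        for (Observer observer : observers) {
          op.call(observer); // each observer may replace the current result
        }
        return op.getResult();
      }

      public static void main(String[] args) throws IOException {
        List<Observer> observers = Arrays.asList((increment, current) -> current + "!");
        String result = execOperationWithResult("seed", observers,
            new OperationWithResult<String>() {
              @Override
              void call(Observer observer) throws IOException {
                setResult(observer.postIncrement("incr-op", getResult()));
              }
            });
        System.out.println(result); // seed!
      }
    }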

[42/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
index aef8196..6f1763e 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
@@ -89,11 +89,11 @@
 Annotation Type Hierarchy
 
 org.apache.hadoop.hbase.classification.InterfaceStability.Stable (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
-org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
-org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
-org.apache.hadoop.hbase.classification.InterfaceStability.Unstable (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
 org.apache.hadoop.hbase.classification.InterfaceStability.Evolving (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
 org.apache.hadoop.hbase.classification.InterfaceAudience.Public (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceStability.Unstable (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
 
 
 



[05/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
index 25d6b70..635798d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html

[47/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index d662593..ba28022 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-10
+  Last Published: 
2017-07-11
 
 
 



[40/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index 8c06db0..42e641b 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class AsyncHBaseAdmin
+public class AsyncHBaseAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
@@ -221,45 +221,52 @@ implements 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+clearCompactionQueues(ServerName serverName,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> queues)
+Clear compacting queues on a region server.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 cloneSnapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
  TableName tableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
 closeRegion(byte[] regionName,
http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional serverName)
 Close a region.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true"; title="class or interface in java.util.concurrent">CompletableFuture

[13/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
index 75db22d..99a09f9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import org.apache.commons.logging.LogFactory;
-049import org.apache.hadoop.hbase.ClusterStatus;
-050import org.apache.hadoop.hbase.HRegionInfo;
-051import org.apache.hadoop.hbase.HRegionLocation;
-052import org.apache.hadoop.hbase.MetaTableAccessor;
-053import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import org.apache.hadoop.hbase.NotServingRegionException;
-055import org.apache.hadoop.hbase.ProcedureInfo;
-056import org.apache.hadoop.hbase.RegionLoad;
-057import org.apache.hadoop.hbase.RegionLocations;
-058import org.apache.hadoop.hbase.ServerName;
-059import org.apache.hadoop.hbase.NamespaceDescriptor;
-060import org.apache.hadoop.hbase.HConstants;
-061import org.apache.hadoop.hbase.TableExistsException;
-062import org.apache.hadoop.hbase.TableName;
-063import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import org.apache.hadoop.hbase.TableNotDisabledException;
-065import org.apache.hadoop.hbase.TableNotEnabledException;
-066import org.apache.hadoop.hbase.TableNotFoundException;
-067import org.apache.hadoop.hbase.UnknownRegionException;
-068import org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import org.apache.hadoop.hbase.client.Scan.ReadType;
-072import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import org.apache.hadoop.hbase.client.replication.TableCFs;
-074import org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import org.apache.hadoop.hbase.replication.ReplicationException;
-080import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import org.apache.hadoop.hbase.shaded.protobuf.generated