[3/4] hbase git commit: HBASE-15816 Provide client with ability to set priority on Operations

2017-07-21 Thread apurtell
HBASE-15816 Provide client with ability to set priority on Operations

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/26247996
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/26247996
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/26247996

Branch: refs/heads/branch-1
Commit: 26247996d25dad38678fed2e2a1b8f0d383df082
Parents: 6f1cc2c
Author: rgidwani 
Authored: Fri Jul 21 12:20:24 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Jul 21 17:12:25 2017 -0700

--
 .../org/apache/hadoop/hbase/client/Action.java  |  8 ++
 .../org/apache/hadoop/hbase/client/Append.java  |  6 +
 .../hadoop/hbase/client/AsyncProcess.java   | 18 ++---
 .../org/apache/hadoop/hbase/client/Delete.java  |  7 +
 .../org/apache/hadoop/hbase/client/Get.java |  6 +
 .../org/apache/hadoop/hbase/client/HTable.java  | 27 +---
 .../apache/hadoop/hbase/client/Increment.java   |  6 +
 .../apache/hadoop/hbase/client/MultiAction.java | 10 
 .../hbase/client/MultiServerCallable.java   |  5 ++--
 .../apache/hadoop/hbase/client/Mutation.java|  5 +++-
 .../hbase/client/OperationWithAttributes.java   | 11 
 .../client/PayloadCarryingServerCallable.java   | 10 ++--
 .../hbase/client/RegionServerCallable.java  | 11 
 .../hadoop/hbase/client/RowMutations.java   |  8 ++
 .../RpcRetryingCallerWithReadReplicas.java  |  3 ++-
 .../org/apache/hadoop/hbase/client/Scan.java|  7 +
 .../hadoop/hbase/client/ScannerCallable.java|  2 +-
 .../hadoop/hbase/ipc/HBaseRpcController.java|  2 --
 .../hbase/ipc/HBaseRpcControllerImpl.java   |  6 ++---
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java|  3 ++-
 .../hbase/ipc/RegionCoprocessorRpcChannel.java  |  3 ++-
 .../org/apache/hadoop/hbase/HConstants.java |  1 +
 .../hbase/client/TestRpcControllerFactory.java  | 27 +---
 .../apache/hadoop/hbase/io/TestHeapSize.java|  2 ++
 24 files changed, 164 insertions(+), 30 deletions(-)
--
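The client-facing piece of this change is a new setPriority(int) on the operation classes (Get, Scan, and the Mutation subclasses), plumbed through the RPC controller so the server-side scheduler can see it. Below is a minimal usage sketch, not part of the patch: the table name "t1", family "f", and row keys are placeholders, and HConstants.HIGH_QOS is used only as an example priority value.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class PriorityExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("t1"))) {
      // Tag an urgent read so the server-side scheduler can favor it.
      Get get = new Get(Bytes.toBytes("row1"));
      get.setPriority(HConstants.HIGH_QOS);
      Result result = table.get(get);
      System.out.println(result);

      // Operations left alone keep the new default, HConstants.PRIORITY_UNSET,
      // and are scheduled exactly as before this patch.
      Put put = new Put(Bytes.toBytes("row2"));
      put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      table.put(put);
    }
  }
}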


http://git-wip-us.apache.org/repos/asf/hbase/blob/26247996/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
index 2bc5d79..5417b6b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
@@ -34,11 +34,17 @@ public class Action implements Comparable {
   private int originalIndex;
   private long nonce = HConstants.NO_NONCE;
   private int replicaId = RegionReplicaUtil.DEFAULT_REPLICA_ID;
+  private int priority;
 
   public Action(Row action, int originalIndex) {
+this(action, originalIndex, HConstants.PRIORITY_UNSET);
+  }
+
+  public Action(Row action, int originalIndex, int priority) {
 super();
 this.action = action;
 this.originalIndex = originalIndex;
+this.priority = priority;
   }
 
   /**
@@ -75,6 +81,8 @@ public class Action implements Comparable {
 return replicaId;
   }
 
+  public int getPriority() { return priority; }
+
   @SuppressWarnings("rawtypes")
   @Override
   public int compareTo(Object o) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/26247996/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index f20f727..ec4ea37 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -86,6 +86,7 @@ public class Append extends Mutation {
 for (Map.Entry entry : a.getAttributesMap().entrySet()) {
   this.setAttribute(entry.getKey(), entry.getValue());
 }
+this.setPriority(a.getPriority());
   }
 
   /** Create a Append operation for the specified row.
@@ -184,6 +185,11 @@ public class Append extends Mutation {
   }
 
   @Override
+  public Append setPriority(int priority) {
+return (Append) super.setPriority(priority);
+  }
+
+  @Override
   public Append setTTL(long ttl) {
 return (Append) super.setTTL(ttl);
   }
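Note the covariant override above: each operation redeclares setPriority to return its own type so fluent chains keep their static type. A small illustrative sketch (row, family, and qualifier are placeholders, and HIGH_QOS is only an example value):

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendPriorityExample {
  // Without the covariant override, setPriority() would return the base
  // OperationWithAttributes type and the Append-specific add() call below
  // would not compile.
  static Append urgentAppend() {
    return new Append(Bytes.toBytes("row1"))
        .setPriority(HConstants.HIGH_QOS)
        .add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("-suffix"));
  }
}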

http://git-wip-us.apache.org/repos/asf/hbase/blob/26247996/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java 
b/hbase-client/src/main/java/org/apa

[2/4] hbase git commit: HBASE-15816 Provide client with ability to set priority on Operations

2017-07-21 Thread apurtell
HBASE-15816 Provide client with ability to set priority on Operations

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d461bec6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d461bec6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d461bec6

Branch: refs/heads/branch-2
Commit: d461bec6c2c6d4035dc6d2ad2cebe976eba24aef
Parents: 9462891
Author: rgidwani 
Authored: Fri Jul 14 10:18:26 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Jul 21 17:12:21 2017 -0700

--
 .../org/apache/hadoop/hbase/client/Action.java  |  8 +++
 .../org/apache/hadoop/hbase/client/Append.java  |  6 +
 .../hadoop/hbase/client/AsyncProcess.java   | 17 +++---
 .../hbase/client/AsyncRequestFutureImpl.java|  2 +-
 .../client/CancellableRegionServerCallable.java |  4 ++--
 .../hbase/client/ClientServiceCallable.java |  5 ++--
 .../org/apache/hadoop/hbase/client/Delete.java  |  6 +
 .../org/apache/hadoop/hbase/client/Get.java |  5 
 .../org/apache/hadoop/hbase/client/HTable.java  | 20 
 .../apache/hadoop/hbase/client/Increment.java   |  6 +
 .../apache/hadoop/hbase/client/MultiAction.java | 12 ++
 .../hbase/client/MultiServerCallable.java   |  4 ++--
 .../apache/hadoop/hbase/client/Mutation.java|  5 +++-
 .../client/NoncedRegionServerCallable.java  |  4 ++--
 .../hbase/client/OperationWithAttributes.java   | 12 ++
 .../client/RegionCoprocessorRpcChannel.java |  3 ++-
 .../hbase/client/RegionServerCallable.java  | 11 +
 .../hadoop/hbase/client/RowMutations.java   |  8 +++
 .../RpcRetryingCallerWithReadReplicas.java  |  4 ++--
 .../org/apache/hadoop/hbase/client/Scan.java|  7 ++
 .../hadoop/hbase/client/ScannerCallable.java|  2 +-
 .../hbase/client/SecureBulkLoadClient.java  |  7 +++---
 .../hadoop/hbase/ipc/HBaseRpcController.java|  2 --
 .../hbase/ipc/HBaseRpcControllerImpl.java   |  7 +++---
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java|  3 ++-
 .../org/apache/hadoop/hbase/HConstants.java |  1 +
 .../hbase/client/TestRpcControllerFactory.java  | 24 ++--
 ...gionServerBulkLoadWithOldSecureEndpoint.java |  5 ++--
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java|  3 +++
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |  2 +-
 .../regionserver/wal/WALEditsReplaySink.java|  2 +-
 .../org/apache/hadoop/hbase/client/TestHCM.java |  2 +-
 .../hbase/client/TestReplicaWithCluster.java|  2 +-
 .../apache/hadoop/hbase/io/TestHeapSize.java|  2 ++
 .../TestLoadIncrementalHFilesSplitRecovery.java |  2 +-
 .../hadoop/hbase/quotas/TestSpaceQuotas.java|  3 ++-
 .../regionserver/TestHRegionServerBulkLoad.java |  5 ++--
 .../TestHRegionServerBulkLoadWithOldClient.java |  5 ++--
 38 files changed, 178 insertions(+), 50 deletions(-)
--
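As on branch-1, operations default to the new HConstants.PRIORITY_UNSET sentinel and expose whatever was set via getPriority(). The sketch below shows the expected round trip; it is a reading of the diff, not output from an actual run:

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;

public class PriorityDefaultExample {
  public static void main(String[] args) {
    Scan scan = new Scan();
    // Nothing set yet: the operation should report the PRIORITY_UNSET sentinel
    // added by this patch, leaving priority selection to the client as before.
    System.out.println(scan.getPriority() == HConstants.PRIORITY_UNSET);
    // After opting in, getPriority() returns the caller-supplied value.
    scan.setPriority(HConstants.ADMIN_QOS);
    System.out.println(scan.getPriority());
  }
}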


http://git-wip-us.apache.org/repos/asf/hbase/blob/d461bec6/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
index ef05912..f4b696a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
@@ -32,10 +32,16 @@ public class Action implements Comparable {
   private final int originalIndex;
   private long nonce = HConstants.NO_NONCE;
   private int replicaId = RegionReplicaUtil.DEFAULT_REPLICA_ID;
+  private int priority;
 
   public Action(Row action, int originalIndex) {
+this(action, originalIndex, HConstants.PRIORITY_UNSET);
+  }
+
+  public Action(Row action, int originalIndex, int priority) {
 this.action = action;
 this.originalIndex = originalIndex;
+this.priority = priority;
   }
 
   /**
@@ -70,6 +76,8 @@ public class Action implements Comparable {
 return replicaId;
   }
 
+  public int getPriority() { return priority; }
+
   @Override
   public int compareTo(Action other) {
 return action.compareTo(other.getAction());

http://git-wip-us.apache.org/repos/asf/hbase/blob/d461bec6/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 346eb0e..02ec770 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -84,6 +84,7 @@ public class Append extends Mutation {
 for (Map.E

[4/4] hbase git commit: HBASE-15816 Provide client with ability to set priority on Operations

2017-07-21 Thread apurtell
HBASE-15816 Provide client with ability to set priority on Operations

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f70b5f89
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f70b5f89
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f70b5f89

Branch: refs/heads/branch-1.4
Commit: f70b5f8948fbd1c6759ea2b4982b59d78ef6e199
Parents: 8cfcd12
Author: rgidwani 
Authored: Fri Jul 21 12:20:24 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Jul 21 17:12:31 2017 -0700

--
 .../org/apache/hadoop/hbase/client/Action.java  |  8 ++
 .../org/apache/hadoop/hbase/client/Append.java  |  6 +
 .../hadoop/hbase/client/AsyncProcess.java   | 18 ++---
 .../org/apache/hadoop/hbase/client/Delete.java  |  7 +
 .../org/apache/hadoop/hbase/client/Get.java |  6 +
 .../org/apache/hadoop/hbase/client/HTable.java  | 27 +---
 .../apache/hadoop/hbase/client/Increment.java   |  6 +
 .../apache/hadoop/hbase/client/MultiAction.java | 10 
 .../hbase/client/MultiServerCallable.java   |  5 ++--
 .../apache/hadoop/hbase/client/Mutation.java|  5 +++-
 .../hbase/client/OperationWithAttributes.java   | 11 
 .../client/PayloadCarryingServerCallable.java   | 10 ++--
 .../hbase/client/RegionServerCallable.java  | 11 
 .../hadoop/hbase/client/RowMutations.java   |  8 ++
 .../RpcRetryingCallerWithReadReplicas.java  |  3 ++-
 .../org/apache/hadoop/hbase/client/Scan.java|  7 +
 .../hadoop/hbase/client/ScannerCallable.java|  2 +-
 .../hadoop/hbase/ipc/HBaseRpcController.java|  2 --
 .../hbase/ipc/HBaseRpcControllerImpl.java   |  6 ++---
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java|  3 ++-
 .../hbase/ipc/RegionCoprocessorRpcChannel.java  |  3 ++-
 .../org/apache/hadoop/hbase/HConstants.java |  1 +
 .../hbase/client/TestRpcControllerFactory.java  | 27 +---
 .../apache/hadoop/hbase/io/TestHeapSize.java|  2 ++
 24 files changed, 164 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f70b5f89/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
index 2bc5d79..5417b6b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
@@ -34,11 +34,17 @@ public class Action implements Comparable {
   private int originalIndex;
   private long nonce = HConstants.NO_NONCE;
   private int replicaId = RegionReplicaUtil.DEFAULT_REPLICA_ID;
+  private int priority;
 
   public Action(Row action, int originalIndex) {
+this(action, originalIndex, HConstants.PRIORITY_UNSET);
+  }
+
+  public Action(Row action, int originalIndex, int priority) {
 super();
 this.action = action;
 this.originalIndex = originalIndex;
+this.priority = priority;
   }
 
   /**
@@ -75,6 +81,8 @@ public class Action implements Comparable {
 return replicaId;
   }
 
+  public int getPriority() { return priority; }
+
   @SuppressWarnings("rawtypes")
   @Override
   public int compareTo(Object o) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f70b5f89/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index f20f727..ec4ea37 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -86,6 +86,7 @@ public class Append extends Mutation {
 for (Map.Entry entry : a.getAttributesMap().entrySet()) {
   this.setAttribute(entry.getKey(), entry.getValue());
 }
+this.setPriority(a.getPriority());
   }
 
   /** Create a Append operation for the specified row.
@@ -184,6 +185,11 @@ public class Append extends Mutation {
   }
 
   @Override
+  public Append setPriority(int priority) {
+return (Append) super.setPriority(priority);
+  }
+
+  @Override
   public Append setTTL(long ttl) {
 return (Append) super.setTTL(ttl);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f70b5f89/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java 
b/hbase-client/src/main/java/org/a

[1/4] hbase git commit: HBASE-15816 Provide client with ability to set priority on Operations

2017-07-21 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 6f1cc2c89 -> 26247996d
  refs/heads/branch-1.4 8cfcd12e9 -> f70b5f894
  refs/heads/branch-2 946289113 -> d461bec6c
  refs/heads/master 70a357dc5 -> ec3cb1966


HBASE-15816 Provide client with ability to set priority on Operations

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ec3cb196
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ec3cb196
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ec3cb196

Branch: refs/heads/master
Commit: ec3cb196641498edfa71c4f9e1bde5bc15acd8ed
Parents: 70a357d
Author: rgidwani 
Authored: Fri Jul 14 10:18:26 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Jul 21 17:12:16 2017 -0700

--
 .../org/apache/hadoop/hbase/client/Action.java  |  8 +++
 .../org/apache/hadoop/hbase/client/Append.java  |  6 +
 .../hadoop/hbase/client/AsyncProcess.java   | 17 +++---
 .../hbase/client/AsyncRequestFutureImpl.java|  2 +-
 .../client/CancellableRegionServerCallable.java |  4 ++--
 .../hbase/client/ClientServiceCallable.java |  5 ++--
 .../org/apache/hadoop/hbase/client/Delete.java  |  6 +
 .../org/apache/hadoop/hbase/client/Get.java |  5 
 .../org/apache/hadoop/hbase/client/HTable.java  | 20 
 .../apache/hadoop/hbase/client/Increment.java   |  6 +
 .../apache/hadoop/hbase/client/MultiAction.java | 12 ++
 .../hbase/client/MultiServerCallable.java   |  4 ++--
 .../apache/hadoop/hbase/client/Mutation.java|  5 +++-
 .../client/NoncedRegionServerCallable.java  |  4 ++--
 .../hbase/client/OperationWithAttributes.java   | 12 ++
 .../client/RegionCoprocessorRpcChannel.java |  3 ++-
 .../hbase/client/RegionServerCallable.java  | 11 +
 .../hadoop/hbase/client/RowMutations.java   |  8 +++
 .../RpcRetryingCallerWithReadReplicas.java  |  4 ++--
 .../org/apache/hadoop/hbase/client/Scan.java|  7 ++
 .../hadoop/hbase/client/ScannerCallable.java|  2 +-
 .../hbase/client/SecureBulkLoadClient.java  |  7 +++---
 .../hadoop/hbase/ipc/HBaseRpcController.java|  2 --
 .../hbase/ipc/HBaseRpcControllerImpl.java   |  7 +++---
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java|  3 ++-
 .../org/apache/hadoop/hbase/HConstants.java |  1 +
 .../hbase/client/TestRpcControllerFactory.java  | 24 ++--
 ...gionServerBulkLoadWithOldSecureEndpoint.java |  5 ++--
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java|  3 +++
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |  2 +-
 .../regionserver/wal/WALEditsReplaySink.java|  2 +-
 .../org/apache/hadoop/hbase/client/TestHCM.java |  2 +-
 .../hbase/client/TestReplicaWithCluster.java|  2 +-
 .../apache/hadoop/hbase/io/TestHeapSize.java|  2 ++
 .../TestLoadIncrementalHFilesSplitRecovery.java |  2 +-
 .../hadoop/hbase/quotas/TestSpaceQuotas.java|  3 ++-
 .../regionserver/TestHRegionServerBulkLoad.java |  5 ++--
 .../TestHRegionServerBulkLoadWithOldClient.java |  5 ++--
 38 files changed, 178 insertions(+), 50 deletions(-)
--
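On master and branch-2 the priority also reaches SimpleRpcScheduler (see the diffstat above). Purely as an illustration of the general idea, and not HBase's actual scheduler, a dispatcher can order queued work by the priority each request carries:

import java.util.concurrent.PriorityBlockingQueue;

// Toy illustration only: requests carrying a higher priority are handed to
// workers first. HBase's real dispatch (SimpleRpcScheduler plus dedicated
// priority handler pools) is more involved than this sketch.
public class ToyPriorityDispatcher {
  static final class Request implements Comparable<Request> {
    final int priority;
    final Runnable work;
    Request(int priority, Runnable work) {
      this.priority = priority;
      this.work = work;
    }
    @Override
    public int compareTo(Request other) {
      // Reverse the natural order so the highest priority is taken first.
      return Integer.compare(other.priority, this.priority);
    }
  }

  private final PriorityBlockingQueue<Request> queue = new PriorityBlockingQueue<>();

  public void submit(int priority, Runnable work) {
    queue.put(new Request(priority, work));
  }

  public void runOne() throws InterruptedException {
    queue.take().work.run();
  }
}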


http://git-wip-us.apache.org/repos/asf/hbase/blob/ec3cb196/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
index ef05912..f4b696a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
@@ -32,10 +32,16 @@ public class Action implements Comparable {
   private final int originalIndex;
   private long nonce = HConstants.NO_NONCE;
   private int replicaId = RegionReplicaUtil.DEFAULT_REPLICA_ID;
+  private int priority;
 
   public Action(Row action, int originalIndex) {
+this(action, originalIndex, HConstants.PRIORITY_UNSET);
+  }
+
+  public Action(Row action, int originalIndex, int priority) {
 this.action = action;
 this.originalIndex = originalIndex;
+this.priority = priority;
   }
 
   /**
@@ -70,6 +76,8 @@ public class Action implements Comparable {
 return replicaId;
   }
 
+  public int getPriority() { return priority; }
+
   @Override
   public int compareTo(Action other) {
 return action.compareTo(other.getAction());

http://git-wip-us.apache.org/repos/asf/hbase/blob/ec3cb196/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 346eb0e..02ec770 100644
--- a/hb

hbase git commit: HBASE-18407 [C++] make Configuration::Set/GetBool work for both true/false and 1/0 (Xiaobing Zhou)

2017-07-21 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 1193812d7 -> 6c442d54c


HBASE-18407 [C++] make Configuration::Set/GetBool work for both true/false and 1/0 (Xiaobing Zhou)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6c442d54
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6c442d54
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6c442d54

Branch: refs/heads/HBASE-14850
Commit: 6c442d54ca118d2af763336ec1ea79f2c8470d95
Parents: 1193812
Author: Enis Soztutar 
Authored: Fri Jul 21 16:46:54 2017 -0700
Committer: Enis Soztutar 
Committed: Fri Jul 21 16:46:54 2017 -0700

--
 hbase-native-client/core/configuration-test.cc | 58 -
 hbase-native-client/core/configuration.cc  | 10 ++--
 2 files changed, 64 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6c442d54/hbase-native-client/core/configuration-test.cc
--
diff --git a/hbase-native-client/core/configuration-test.cc b/hbase-native-client/core/configuration-test.cc
index 192ed46..abdf0c7 100644
--- a/hbase-native-client/core/configuration-test.cc
+++ b/hbase-native-client/core/configuration-test.cc
@@ -22,6 +22,62 @@
 
 using hbase::Configuration;
 
+TEST(Configuration, SetGetBool) {
+  Configuration conf;
+
+  /* test true/false */
+  conf.SetBool("bool_key1", true);
+  EXPECT_EQ(true, conf.GetBool("bool_key1", false));
+  conf.SetBool("bool_key2", false);
+  EXPECT_EQ(false, conf.GetBool("bool_key2", true));
+
+  /* test 1/0 */
+  conf.SetBool("bool_key3", 1);
+  EXPECT_EQ(true, conf.GetBool("bool_key3", false));
+  conf.SetBool("bool_key4", 0);
+  EXPECT_EQ(false, conf.GetBool("bool_key4", true));
+
+  /* test non zero integer */
+  conf.SetBool("bool_key5", 5);
+  EXPECT_EQ(true, conf.GetBool("bool_key5", false));
+  conf.SetBool("bool_key6", -1);
+  EXPECT_EQ(true, conf.GetBool("bool_key5", false));
+
+  /* test non zero float */
+  conf.SetBool("bool_key7", 5.1);
+  EXPECT_EQ(true, conf.GetBool("bool_key7", false));
+  conf.SetBool("bool_key8", -1.2);
+  EXPECT_EQ(true, conf.GetBool("bool_key8", false));
+}
+
+TEST(Configuration, SetGetForBool) {
+  Configuration conf;
+
+  /* test true/false */
+  conf.Set("bool_key1", "true");
+  EXPECT_EQ(true, conf.GetBool("bool_key1", false));
+  conf.Set("bool_key2", "false");
+  EXPECT_EQ(false, conf.GetBool("bool_key2", true));
+
+  /* test 1/0 */
+  conf.Set("bool_key3", "1");
+  EXPECT_EQ(true, conf.GetBool("bool_key3", false));
+  conf.Set("bool_key4", "0");
+  EXPECT_EQ(false, conf.GetBool("bool_key4", true));
+
+  /* test non zero integer */
+  conf.Set("bool_key5", "5");
+  EXPECT_THROW(conf.GetBool("bool_key5", false), std::runtime_error);
+  conf.Set("bool_key6", "-1");
+  EXPECT_THROW(conf.GetBool("bool_key6", false), std::runtime_error);
+
+  /* test non zero float */
+  conf.Set("bool_key7", "5.1");
+  EXPECT_THROW(conf.GetBool("bool_key7", false), std::runtime_error);
+  conf.Set("bool_key8", "-1.2");
+  EXPECT_THROW(conf.GetBool("bool_key8", false), std::runtime_error);
+}
+
 TEST(Configuration, SetGet) {
   Configuration conf;
 
@@ -54,7 +110,7 @@ TEST(Configuration, SetGetDouble) {
   EXPECT_EQ(conf.GetDouble("foo", 0), 42.0);
 }
 
-TEST(Configuration, SetGetBool) {
+TEST(Configuration, SetGetBoolBasic) {
   Configuration conf;
 
   EXPECT_EQ(conf.GetBool("foo", false), false);

http://git-wip-us.apache.org/repos/asf/hbase/blob/6c442d54/hbase-native-client/core/configuration.cc
--
diff --git a/hbase-native-client/core/configuration.cc b/hbase-native-client/core/configuration.cc
index afca122..f4fc46d 100644
--- a/hbase-native-client/core/configuration.cc
+++ b/hbase-native-client/core/configuration.cc
@@ -25,6 +25,7 @@
 
 #include 
 #include 
+#include 
 
 namespace hbase {
 
@@ -202,12 +203,15 @@ double Configuration::GetDouble(const std::string &key, 
double default_value) co
 optional Configuration::GetBool(const std::string &key) const {
   optional raw = Get(key);
   if (raw) {
-if (!strcasecmp((*raw).c_str(), "true")) {
+if (!strcasecmp((*raw).c_str(), "true") || !strcasecmp((*raw).c_str(), "1")) {
   return std::experimental::make_optional(true);
-} else if (!strcasecmp((*raw).c_str(), "false")) {
+} else if (!strcasecmp((*raw).c_str(), "false") || !strcasecmp((*raw).c_str(), "0")) {
   return std::experimental::make_optional(false);
 } else {
-  throw std::runtime_error("Unexpected value found while conversion to bool.");
+  boost::format what("Unexpected value \"%s\" found being converted to bool for key \"%s\"");
+  what % (*raw);
+  what % key;
+  throw std::runtime_error(what.str());
 }
   }
   return op

hbase git commit: HBASE-18338 [C++] Implement RpcTestServer (Xiaobing Zhou)

2017-07-21 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 a93c6a998 -> 1193812d7


HBASE-18338 [C++] Implement RpcTestServer (Xiaobing Zhou)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1193812d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1193812d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1193812d

Branch: refs/heads/HBASE-14850
Commit: 1193812d784f407ab8596380e003b65de27a117a
Parents: a93c6a9
Author: Enis Soztutar 
Authored: Fri Jul 21 16:29:44 2017 -0700
Committer: Enis Soztutar 
Committed: Fri Jul 21 16:29:44 2017 -0700

--
 hbase-native-client/connection/BUCK |  13 ++
 .../connection/client-handler.cc|  21 +-
 hbase-native-client/connection/client-handler.h |   7 +-
 hbase-native-client/connection/pipeline.cc  |  12 +-
 .../connection/rpc-test-server-handler.cc   |  77 ++
 .../connection/rpc-test-server-handler.h|  47 
 .../connection/rpc-test-server.cc   |  70 ++
 .../connection/rpc-test-server.h|  50 
 hbase-native-client/connection/rpc-test.cc  |  86 +++
 hbase-native-client/connection/sasl-handler.h   |   2 +-
 hbase-native-client/if/test.proto   |  43 
 hbase-native-client/if/test_rpc_service.proto   |  35 +++
 hbase-native-client/serde/BUCK  |   4 +-
 .../serde/client-deserializer-test.cc   |   3 +-
 .../serde/client-serializer-test.cc |   2 +-
 hbase-native-client/serde/rpc-serde.cc  | 234 +++
 hbase-native-client/serde/rpc-serde.h   | 141 +++
 hbase-native-client/serde/rpc.cc| 222 --
 hbase-native-client/serde/rpc.h | 125 --
 19 files changed, 827 insertions(+), 367 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1193812d/hbase-native-client/connection/BUCK
--
diff --git a/hbase-native-client/connection/BUCK b/hbase-native-client/connection/BUCK
index c3119eb..aaf8fdb 100644
--- a/hbase-native-client/connection/BUCK
+++ b/hbase-native-client/connection/BUCK
@@ -33,6 +33,8 @@ cxx_library(
 "service.h",
 "rpc-client.h",
 "sasl-util.h",
+"rpc-test-server.h",
+"rpc-test-server-handler.h",
 ],
 srcs=[
 "client-dispatcher.cc",
@@ -44,6 +46,8 @@ cxx_library(
 "rpc-client.cc",
 "sasl-handler.cc",
 "sasl-util.cc",
+"rpc-test-server.cc",
+"rpc-test-server-handler.cc",
 ],
 deps=[
 "//if:if",
@@ -68,3 +72,12 @@ cxx_test(
 deps=[
 ":connection",
 ],)
+cxx_test(
+name="rpc-test",
+srcs=[
+"rpc-test.cc",
+],
+deps=[
+":connection",
+],
+run_test_separately=True,)

http://git-wip-us.apache.org/repos/asf/hbase/blob/1193812d/hbase-native-client/connection/client-handler.cc
--
diff --git a/hbase-native-client/connection/client-handler.cc b/hbase-native-client/connection/client-handler.cc
index 052c171..39227d3 100644
--- a/hbase-native-client/connection/client-handler.cc
+++ b/hbase-native-client/connection/client-handler.cc
@@ -35,9 +35,10 @@ using google::protobuf::Message;
 namespace hbase {
 
 ClientHandler::ClientHandler(std::string user_name, std::shared_ptr 
codec,
- const std::string &server)
+ std::shared_ptr conf, const 
std::string &server)
 : user_name_(user_name),
   serde_(codec),
+  conf_(conf),
   server_(server),
   once_flag_(std::make_unique()),
   resp_msgs_(
@@ -115,13 +116,17 @@ void ClientHandler::read(Context *ctx, 
std::unique_ptr buf) {
 }
 
 folly::Future ClientHandler::write(Context *ctx, 
std::unique_ptr r) {
-  // We need to send the header once.
-  // So use call_once to make sure that only one thread wins this.
-  std::call_once((*once_flag_), [ctx, this]() {
-VLOG(3) << "Writing RPC Header to server: " << server_;
-auto header = serde_.Header(user_name_);
-ctx->fireWrite(std::move(header));
-  });
+  /* for RPC test, there's no need to send connection header */
+  if (!conf_->GetBool(RpcSerde::HBASE_CLIENT_RPC_TEST_MODE,
+  RpcSerde::DEFAULT_HBASE_CLIENT_RPC_TEST_MODE)) {
+// We need to send the header once.
+// So use call_once to make sure that only one thread wins this.
+std::call_once((*once_flag_), [ctx, this]() {
+  VLOG(3) << "Writing RPC Header to server: " << server_;
+  auto header = serde_.Header(user_name_);
+  ctx->fireWrite(std::move(header));
+});
+  }
 
   VLOG(3) << "Writing RPC Request:" << r->DebugString() << ", server: " << 
se

[41/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index e5ede39..d4b122e 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -115,7 +115,7 @@
 org.apache.hadoop.hbase.backup.HFileArchiver.FileableStoreFile
 
 
-org.apache.hadoop.hbase.backup.HFileArchiver.FileConverter 
(implements com.google.common.base.Function)
+org.apache.hadoop.hbase.backup.HFileArchiver.FileConverter 
(implements 
org.apache.hadoop.hbase.shaded.com.google.common.base.Function)
 
 org.apache.hadoop.hbase.backup.HFileArchiver.FileStatusConverter
 org.apache.hadoop.hbase.backup.HFileArchiver.StoreToFile
@@ -166,10 +166,10 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
-org.apache.hadoop.hbase.backup.BackupType
 org.apache.hadoop.hbase.backup.BackupInfo.BackupState
+org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
 org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase
+org.apache.hadoop.hbase.backup.BackupType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index a34b149..56052d4 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -7127,8 +7127,8 @@ service.
 
 
 Cell
-BulkLoadCellFilter.filterCell(Cell cell,
-  
com.google.common.base.Predicate famPredicate)
+BulkLoadCellFilter.filterCell(Cell cell,
+  
org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate famPredicate)
 Filters the bulk load cell using the supplied 
predicate.
 
 
@@ -7160,8 +7160,8 @@ service.
 
 
 Cell
-BulkLoadCellFilter.filterCell(Cell cell,
-  
com.google.common.base.Predicate famPredicate)
+BulkLoadCellFilter.filterCell(Cell cell,
+  
org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate famPredicate)
 Filters the bulk load cell using the supplied 
predicate.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
index 4568865..e65c773 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
@@ -1014,10 +1014,10 @@
 
 
 private void
-RSGroupBasedLoadBalancer.generateGroupMaps(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regions,
+RSGroupBasedLoadBalancer.generateGroupMaps(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regions,
  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers,
- com.google.common.collect.ListMultimapString,HRegionInfo> regionMap,
- com.google.common.collect.ListMultimapString,ServerName> serverMap) 
+ 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,HRegionInfo> regionMap,
+ 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,ServerName> serverMap) 
 
 
 void

h

[47/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
index 284fd2f..1c4cf6e 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
@@ -27,8 +27,8 @@
 019 */
 020package org.apache.hadoop.hbase.client;
 021
-022import 
com.google.common.annotations.VisibleForTesting;
-023import 
com.google.common.util.concurrent.ThreadFactoryBuilder;
+022import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+023import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 024
 025import java.io.IOException;
 026import 
java.util.AbstractMap.SimpleEntry;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
index 284fd2f..1c4cf6e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
@@ -27,8 +27,8 @@
 019 */
 020package org.apache.hadoop.hbase.client;
 021
-022import 
com.google.common.annotations.VisibleForTesting;
-023import 
com.google.common.util.concurrent.ThreadFactoryBuilder;
+022import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+023import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 024
 025import java.io.IOException;
 026import 
java.util.AbstractMap.SimpleEntry;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index c6df3f4..c628528 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -58,11 +58,11 @@
 050import 
org.apache.hadoop.hbase.util.Bytes;
 051import 
org.apache.hadoop.hbase.util.ClassSize;
 052
-053import 
com.google.common.collect.ArrayListMultimap;
-054import 
com.google.common.collect.ListMultimap;
-055import 
com.google.common.io.ByteArrayDataInput;
-056import 
com.google.common.io.ByteArrayDataOutput;
-057import 
com.google.common.io.ByteStreams;
+053import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+054import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
+055import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
+056import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
+057import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
 058
 059@InterfaceAudience.Public
 060public abstract class Mutation extends 
OperationWithAttributes implements Row, CellScannable,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
index 1d0ff6a..d9ab92a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
@@ -28,7 +28,7 @@
 020import java.io.IOException;
 021import java.util.Map;
 022
-023import com.google.common.collect.Maps;
+023import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 024import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 025import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
 026import 
org.apache.hadoop.hbase.filter.Filter;
@@ -40,8 +40,8 @@
 032import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
 033import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 034
-035import 
com.google.common.collect.ArrayListMultimap;
-036import 
com.google.common.collect.ListMultimap;
+035import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+036import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
 037import 
org.apache.hadoop.hbase.util.Bytes;
 

[32/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 0d66ce8..04c172d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
-org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
 org.apache.hadoop.hbase.io.hfile.BlockType
-org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
+org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
+org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
 org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
 org.apache.hadoop.hbase.io.hfile.BlockPriority
+org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.html
index 82b1548..9db3972 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.html
@@ -213,7 +213,7 @@ implements compressor 
 
 
-private static 
com.google.common.cache.LoadingCacheInetSocketAddress,http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger>
+private static 
org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCacheInetSocketAddress,http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger>
 concurrentCounterCache 
 
 
@@ -746,7 +746,7 @@ implements 
 
 concurrentCounterCache
-private static final com.google.common.cache.LoadingCacheInetSocketAddress,http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger> concurrentCounterCache
+private static 
final org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCacheInetSocketAddress,http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger> concurrentCounterCache
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/ipc/RpcClientFactory.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/RpcClientFactory.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/RpcClientFactory.html
index 5bbb857..4d7285f 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/RpcClientFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/RpcClientFactory.html
@@ -136,7 +136,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 CUSTOM_RPC_CLIENT_IMPL_CONF_KEY 
 
 
-private static 
com.google.common.collect.ImmutableMapString,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>
+private static 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMapString,http://docs.oracle.com/javase/8/docs/api/ja

[25/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
index 79eba86..ddaf2f1 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
@@ -150,20 +150,20 @@
 
 
 default void
-RegionObserver.postCompactSelection(ObserverContext c,
+RegionObserver.postCompactSelection(ObserverContext c,
 Store store,
-com.google.common.collect.ImmutableList selected)
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList selected)
 Deprecated. 
-use RegionObserver.postCompactSelection(ObserverContext,
 Store, ImmutableList,
+use RegionObserver.postCompactSelection(ObserverContext,
 Store, ImmutableList,
  CompactionRequest) instead.
 
 
 
 
 default void
-RegionObserver.postCompactSelection(ObserverContext c,
+RegionObserver.postCompactSelection(ObserverContext c,
 Store store,
-com.google.common.collect.ImmutableList selected,
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList selected,
 CompactionRequest request)
 Called after the StoreFiles to compact 
have been selected from the available
  candidates.
@@ -697,8 +697,8 @@
 
 
 void
-RegionCoprocessorHost.postCompactSelection(Store store,
-com.google.common.collect.ImmutableList selected,
+RegionCoprocessorHost.postCompactSelection(Store store,
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList selected,
 CompactionRequest request,
 User user)
 Called after the StoreFiles to be 
compacted have been selected from the available

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
index 2307512..1338464 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
@@ -236,20 +236,20 @@
 
 
 default void
-RegionObserver.postCompactSelection(ObserverContext c,
+RegionObserver.postCompactSelection(ObserverContext c,
 Store store,
-com.google.common.collect.ImmutableList selected)
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList selected)
 Deprecated. 
-use RegionObserver.postCompactSelection(ObserverContext,
 Store, ImmutableList,
+use RegionObserver.postCompactSelection(ObserverContext,
 Store, ImmutableList,
  CompactionRequest) instead.
 
 
 
 
 default void
-RegionObserver.postCompactSelection(ObserverContext c,
+RegionObserver.postCompactSelection(ObserverContext c,
 Store store,
-com.google.common.collect.ImmutableList selected,
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList selected,
 CompactionRequest request)
 Called after the StoreFiles to compact 
have been selected from the available
  candidates.
@@ -514,11 +514,11 @@
 
 
 
-private 
com.google.common.collect.ImmutableList
+private 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList
 StripeStoreFileManager.State.allCompactedFilesCached 
 
 
-com.google.common.collect.ImmutableList
+org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList
 StripeStoreFileManager.State.allFilesCached
 Cached list of all files in the structure, to return from 
some calls
 
@@ -557,7 +557,7 @@
 StripeStoreFileManager.CompactionOrFlushMergeCopy.l0Results 
 
 
-com.google.common.collect.ImmutableList
+org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList
 StripeStoreFileManager.State.level0Files
 Level 0.
 
@@ -587,13 +587,13 @@
 DefaultStoreFileManager.storeFileComparator 
 
 
-private 
com.google.common.collect.ImmutableList
+private 
org.apache.hadoop.hbase.shaded.com.

[12/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index 3558cd0..1051fec 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -34,614 +34,620 @@
 026import 
java.util.concurrent.ExecutorService;
 027import java.util.function.Function;
 028import java.util.regex.Pattern;
-029
-030import org.apache.commons.logging.Log;
-031import 
org.apache.commons.logging.LogFactory;
-032import 
org.apache.hadoop.hbase.ClusterStatus;
-033import 
org.apache.hadoop.hbase.HRegionInfo;
-034import 
org.apache.hadoop.hbase.ProcedureInfo;
-035import 
org.apache.hadoop.hbase.RegionLoad;
-036import 
org.apache.hadoop.hbase.ServerName;
-037import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-038import 
org.apache.hadoop.hbase.TableName;
-039import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-040import 
org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable;
-041import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-042import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-043import 
org.apache.hadoop.hbase.procedure2.LockInfo;
-044import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-045import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-046import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-047import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-048import 
org.apache.hadoop.hbase.util.Pair;
-049
-050import com.google.protobuf.RpcChannel;
-051
-052/**
-053 * The implementation of AsyncAdmin.
-054 */
-055@InterfaceAudience.Private
-056public class AsyncHBaseAdmin implements 
AsyncAdmin {
+029import java.util.stream.Collectors;
+030
+031import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+032
+033import io.netty.util.Timeout;
+034import io.netty.util.TimerTask;
+035
+036import org.apache.commons.logging.Log;
+037import 
org.apache.commons.logging.LogFactory;
+038import 
org.apache.hadoop.hbase.ClusterStatus;
+039import 
org.apache.hadoop.hbase.HRegionInfo;
+040import 
org.apache.hadoop.hbase.ProcedureInfo;
+041import 
org.apache.hadoop.hbase.RegionLoad;
+042import 
org.apache.hadoop.hbase.ServerName;
+043import 
org.apache.hadoop.hbase.NamespaceDescriptor;
+044import 
org.apache.hadoop.hbase.TableName;
+045import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable;
+047import 
org.apache.hadoop.hbase.client.replication.TableCFs;
+048import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
+049import 
org.apache.hadoop.hbase.procedure2.LockInfo;
+050import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
+051import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
+052import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
+053import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+054import 
org.apache.hadoop.hbase.util.Pair;
+055
+056import com.google.protobuf.RpcChannel;
 057
-058  private static final Log LOG = 
LogFactory.getLog(AsyncHBaseAdmin.class);
-059
-060  private final RawAsyncHBaseAdmin 
rawAdmin;
-061
-062  private final ExecutorService pool;
+058/**
+059 * The implementation of AsyncAdmin.
+060 */
+061@InterfaceAudience.Private
+062public class AsyncHBaseAdmin implements 
AsyncAdmin {
 063
-064  AsyncHBaseAdmin(RawAsyncHBaseAdmin 
rawAdmin, ExecutorService pool) {
-065this.rawAdmin = rawAdmin;
-066this.pool = pool;
-067  }
-068
-069  private  
CompletableFuture wrap(CompletableFuture future) {
-070CompletableFuture 
asyncFuture = new CompletableFuture<>();
-071future.whenCompleteAsync((r, e) -> 
{
-072  if (e != null) {
-073
asyncFuture.completeExceptionally(e);
-074  } else {
-075asyncFuture.complete(r);
-076  }
-077}, pool);
-078return asyncFuture;
-079  }
-080
-081  @Override
-082  public CompletableFuture 
tableExists(TableName tableName) {
-083return 
wrap(rawAdmin.tableExists(tableName));
-084  }
-085
-086  @Override
-087  public 
CompletableFuture> 
listTables(Optional pattern,
-088  boolean includeSysTables) {
-089return 
wrap(rawAdmin.listTables(pattern, includeSysTables));
+064  private static final Log LOG = 
LogFactory.getLog(AsyncHBaseAdmin.class);
+065
+066  private final RawAsyncHBaseAdmin 
rawAdmin;
+067
+068  private final ExecutorService pool;
+069
+070  AsyncHBaseAdmin(RawAsyncHBaseAdmin 
rawAdmin, ExecutorService pool) {
+071this.rawAdmin = rawAdmin;
+072this.pool = pool;
+073  }
+074
+075  private  
CompletableFuture

[18/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
index 20d91ce..07607fc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,12 +109,12 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-com.google.common.util.concurrent.Service, ReplicationEndpoint, ReplicationPeerConfigListener
+ReplicationEndpoint, ReplicationPeerConfigListener, 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
 
 
 
 @InterfaceAudience.Private
-public class VisibilityReplicationEndpoint
+public class VisibilityReplicationEndpoint
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements ReplicationEndpoint
 
@@ -137,11 +137,11 @@ implements ReplicationEndpoint.Context, ReplicationEndpoint.ReplicateContext
 
 
-
+
 
 
-Nested classes/interfaces inherited from 
interface com.google.common.util.concurrent.Service
-com.google.common.util.concurrent.Service.State
+Nested classes/interfaces inherited from 
interface org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
+org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service.Listener,
 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service.State
 
 
 
@@ -203,65 +203,84 @@ implements Method and Description
 
 
+void
+addListener(org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service.Listener listener,
+   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html?is-external=true";
 title="class or interface in 
java.util.concurrent">Executor executor) 
+
+
+void
+awaitRunning() 
+
+
+void
+awaitRunning(long l,
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true";
 title="class or interface in 
java.util.concurrent">TimeUnit timeUnit) 
+
+
+void
+awaitTerminated() 
+
+
+void
+awaitTerminated(long l,
+   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true";
 title="class or interface in 
java.util.concurrent">TimeUnit timeUnit) 
+
+
 boolean
 canReplicateToSameCluster()
 Whether or not, the replication endpoint can replicate to 
it's source cluster with the same
  UUID
 
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable
+failureCause() 
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true";
 title="class or interface in java.util">UUID
 getPeerUUID()
 Returns a UUID of the provided peer id.
 
 
-
+
 WALEntryFilter
 getWALEntryfilter()
 Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
-
+
 void
 init(ReplicationEndpoint.Context context)
 Initialize the replication endpoint with the given 
context.
 
 
-
+
 boolean
 isRunning() 
 
-
+
 void
 peerConfigUpdated(ReplicationPeerConfig rpc)
 Callback method for when users update the 
ReplicationPeerConfig for this peer
 
 
-
+
 boolean
 replicate(ReplicationEndpoint.ReplicateContext replicateContext)
 Replicate the given set of entries (in the context) to the 
other cluster.
 
 
-
-com.google.common.util.concurrent.ListenableFuture
-start() 
+
+org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
+startAsync() 
 
-
-com.google.common.util.concurrent.Service.State
-startAndWait() 
-
-
-com.google.common.util.concurrent.Service.State
+
+org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service.State
 state() 
 
-
-com.google.common.util.concurrent.ListenableFuture
-stop() 
-
-
-com.google.common.util.concurrent.Service.State
-stopAndWait() 
+
+org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
+stopAsync() 
 
 
 
@@ -291,7 +310,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
index 0c07a2f..c90d203 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
@@ -34,553 +34,554 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import 
com.google.common.annotations.VisibleForTesting;
-030
-031import io.netty.buffer.ByteBuf;
-032import 
io.netty.buffer.ByteBufAllocator;
-033import io.netty.channel.Channel;
-034import 
io.netty.channel.ChannelHandler.Sharable;
-035import 
io.netty.channel.ChannelHandlerContext;
-036import io.netty.channel.EventLoop;
-037import 
io.netty.channel.SimpleChannelInboundHandler;
-038import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-039import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-040import 
io.netty.handler.timeout.IdleStateEvent;
-041import 
io.netty.handler.timeout.IdleStateHandler;
-042import io.netty.util.concurrent.Future;
-043import 
io.netty.util.concurrent.Promise;
-044import 
io.netty.util.concurrent.PromiseCombiner;
-045
-046import java.io.IOException;
-047import java.nio.ByteBuffer;
-048import java.util.ArrayDeque;
-049import java.util.Collection;
-050import java.util.Collections;
-051import java.util.Deque;
-052import java.util.IdentityHashMap;
-053import java.util.List;
-054import java.util.Set;
-055import 
java.util.concurrent.CompletableFuture;
-056import java.util.concurrent.TimeUnit;
-057import java.util.function.Supplier;
-058
-059import 
org.apache.hadoop.conf.Configuration;
-060import 
org.apache.hadoop.crypto.Encryptor;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-063import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-064import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-065import 
org.apache.hadoop.hbase.util.FSUtils;
-066import 
org.apache.hadoop.hdfs.DFSClient;
-067import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-068import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-069import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-070import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-071import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-072import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-073import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-074import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-075import 
org.apache.hadoop.util.DataChecksum;
-076
-077/**
-078 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-079 * supports writing file with only one 
block.
-080 * <p>
-081 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly
-082 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the
-083 * method. And we place it here under util package because we want to make it independent of WAL
-084 * implementation thus easier to move it to HDFS project finally.
-085 * <p>
-086 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only
-087 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and
-088 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside
-089 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)},
-090 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop},
-091 * there will be an extra context-switch.
-092 * <p>
-093 * Advantages compare to DFSOutputStream:
-094 * <ol>
-095 * <li>The fan out mechanism. This will reduce the latency.</li>
-096 * <li>The asynchronous WAL could also run in the same EventLoop, we could just call write and flush
-097 * inside the EventLoop thread, so generally we only have one thread to do all the things.</li>
-098 * <li>Fail-fast when connection to datanode error. The WAL implementation could open new writer
-099 * ASAP.</li>
-100 * <li>We could benefit from netty's ByteBuf management mechanism.</li>
-101 * </ol>
-102 */
-103@InterfaceAudience.Private
-104public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput {
-105
-106  // The MAX_PACKET_SIZE is 16MB but it include the header

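The class comment in the hunk above describes the calling convention; a hypothetical Java walk-through, with the createOutput argument list treated purely as a placeholder (the real signature is version-specific and is not shown in this diff):

  // all datanode connections share one EventLoop, so this code must run OUTSIDE that EventLoop
  FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(
      dfs, path, /* overwrite */ true, /* createParent */ false, (short) 3, blockSize, eventLoop);
  out.write(walEntryBytes);        // only buffers into netty ByteBufs, no I/O yet
  out.flush(false);                // fans the buffered packet out to every datanode in parallel
  out.close();                     // blocking; never call close()/recoverAndClose() on the EventLoop thread
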
[17/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
index 6d81cef..7430542 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
@@ -172,11 +172,11 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 htds 
 
 
-(package private) 
com.google.common.collect.Multimap
+(package private) 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap
 overlapGroups 
 
 
-private 
com.google.common.collect.ImmutableList
+private 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList
 regionsFromMeta 
 
 
@@ -239,8 +239,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 (package private) void
-dump(http://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true";
 title="class or interface in 
java.util">SortedSet splits,
-com.google.common.collect.Multimap regions)
+dump(http://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true";
 title="class or interface in 
java.util">SortedSet splits,
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap regions)
 This dumps data in a visually reasonable way for visual 
debugging
 
 
@@ -257,7 +257,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 getNumRegions() 
 
 
-com.google.common.collect.ImmutableList
+org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList
 getRegionsFromMeta() 
 
 
@@ -347,7 +347,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 overlapGroups
-final com.google.common.collect.Multimap overlapGroups
+final org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap overlapGroups
 
 
 
@@ -356,7 +356,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionsFromMeta
-private com.google.common.collect.ImmutableList regionsFromMeta
+private org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList regionsFromMeta
 
 
 
@@ -439,7 +439,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionsFromMeta
-public com.google.common.collect.ImmutableList getRegionsFromMeta()
+public org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList getRegionsFromMeta()
 
 
 
@@ -475,14 +475,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-
+
 
 
 
 
 dump
 void dump(http://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true";
 title="class or interface in 
java.util">SortedSet splits,
-  com.google.common.collect.Multimap regions)
+  
org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap regions)
 This dumps data in a visually reasonable way for visual 
debugging
 
 Parameters:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
index 8db38b3..1e809ce 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
@@ -779,7 +779,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 void
-dumpOverlapProblems(com.google.common.collect.Multimap regions) 
+dumpOverlapProblems(org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap regions) 
 
 
 void
@@ -865,7 +865,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 
-com.google.common.collect.Multimap
+org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap
 getOverlapGroups(TableName table) 
 
 
@@ -3204,13 +3204,13 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 
-
+
 
 
 
 
 dumpOverlapProblems
-public void dumpOverlapProblems(com.google.common.collect.Multimap regions)
+public void dumpOverlapProblems(org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap regions)
 
 
 
@@ -3228,7 +3228,7 @@ implements http

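The only functional content of these HBaseFsck hunks is the Guava relocation: the same Multimap/ImmutableList types are now consumed from HBase's shaded package so they cannot clash with a user-supplied Guava. A one-line illustration of the import change (call sites stay untouched):

  // before
  import com.google.common.collect.ImmutableList;
  import com.google.common.collect.Multimap;
  // after this publication
  import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
  import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
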
[04/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
index 0c07a2f..c90d203 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html
@@ -34,553 +34,554 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import 
com.google.common.annotations.VisibleForTesting;
-030
-031import io.netty.buffer.ByteBuf;
-032import 
io.netty.buffer.ByteBufAllocator;
-033import io.netty.channel.Channel;
-034import 
io.netty.channel.ChannelHandler.Sharable;
-035import 
io.netty.channel.ChannelHandlerContext;
-036import io.netty.channel.EventLoop;
-037import 
io.netty.channel.SimpleChannelInboundHandler;
-038import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-039import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-040import 
io.netty.handler.timeout.IdleStateEvent;
-041import 
io.netty.handler.timeout.IdleStateHandler;
-042import io.netty.util.concurrent.Future;
-043import 
io.netty.util.concurrent.Promise;
-044import 
io.netty.util.concurrent.PromiseCombiner;
-045
-046import java.io.IOException;
-047import java.nio.ByteBuffer;
-048import java.util.ArrayDeque;
-049import java.util.Collection;
-050import java.util.Collections;
-051import java.util.Deque;
-052import java.util.IdentityHashMap;
-053import java.util.List;
-054import java.util.Set;
-055import 
java.util.concurrent.CompletableFuture;
-056import java.util.concurrent.TimeUnit;
-057import java.util.function.Supplier;
-058
-059import 
org.apache.hadoop.conf.Configuration;
-060import 
org.apache.hadoop.crypto.Encryptor;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-063import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-064import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-065import 
org.apache.hadoop.hbase.util.FSUtils;
-066import 
org.apache.hadoop.hdfs.DFSClient;
-067import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-068import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-069import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-070import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-071import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-072import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-073import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-074import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-075import 
org.apache.hadoop.util.DataChecksum;
-076
-077/**
-078 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-079 * supports writing file with only one 
block.
-080 * <p>
-081 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly
-082 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the
-083 * method. And we place it here under util package because we want to make it independent of WAL
-084 * implementation thus easier to move it to HDFS project finally.
-085 * <p>
-086 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only
-087 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and
-088 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside
-089 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)},
-090 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop},
-091 * there will be an extra context-switch.
-092 * <p>
-093 * Advantages compare to DFSOutputStream:
-094 * <ol>
-095 * <li>The fan out mechanism. This will reduce the latency.</li>
-096 * <li>The asynchronous WAL could also run in the same EventLoop, we could just call write and flush
-097 * inside the EventLoop thread, so generally we only have one thread to do all the things.</li>
-098 * <li>Fail-fast when connection to datanode error. The WAL implementation could open new writer
-099 * ASAP.</li>
-100 * <li>We could benefit from netty's ByteBuf management mechanism.</li>
-101 * </ol>
-102 */
-103@InterfaceAudience.Private
-104public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput {
-105
-106  // The MAX_PACKET_SIZE is 16MB but it include the header size and checksum size. So here we set a
-107

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
index 4bd98f4..046cb95 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
@@ -212,7 +212,7 @@
 204import 
org.apache.hadoop.util.StringUtils;
 205import 
org.apache.zookeeper.KeeperException;
 206
-207import 
com.google.common.annotations.VisibleForTesting;
+207import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 208import com.google.protobuf.Descriptors;
 209import com.google.protobuf.Message;
 210import 
com.google.protobuf.RpcController;
@@ -1008,7 +1008,7 @@
 1000  /**
 1001   * {@inheritDoc}
 1002   * @deprecated Since 2.0. Will be 
removed in 3.0. Use
-1003   * {@link 
#addColumnFamily(TableName, HColumnDescriptor)} instead.
+1003   * {@link 
#addColumnFamily(TableName, ColumnFamilyDescriptor)} instead.
 1004   */
 1005  @Override
 1006  @Deprecated
@@ -1018,14 +1018,14 @@
 1010  }
 1011
 1012  @Override
-1013  public void addColumnFamily(final 
TableName tableName, final HColumnDescriptor columnFamily)
+1013  public void addColumnFamily(final 
TableName tableName, final ColumnFamilyDescriptor columnFamily)
 1014  throws IOException {
 1015get(addColumnFamilyAsync(tableName, 
columnFamily), syncWaitTimeout, TimeUnit.MILLISECONDS);
 1016  }
 1017
 1018  @Override
 1019  public Future 
addColumnFamilyAsync(final TableName tableName,
-1020  final HColumnDescriptor 
columnFamily) throws IOException {
+1020  final ColumnFamilyDescriptor 
columnFamily) throws IOException {
 1021AddColumnResponse response =
 1022executeCallable(new 
MasterCallable(getConnection(),
 1023getRpcControllerFactory()) 
{
@@ -1106,7 +1106,7 @@
 1098  /**
 1099   * {@inheritDoc}
 1100   * @deprecated As of 2.0. Will be 
removed in 3.0. Use
-1101   * {@link 
#modifyColumnFamily(TableName, HColumnDescriptor)} instead.
+1101   * {@link 
#modifyColumnFamily(TableName, ColumnFamilyDescriptor)} instead.
 1102   */
 1103  @Override
 1104  @Deprecated
@@ -1117,13 +1117,13 @@
 1109
 1110  @Override
   public void modifyColumnFamily(final 
TableName tableName,
-1112  final HColumnDescriptor 
columnFamily) throws IOException {
+1112  final ColumnFamilyDescriptor 
columnFamily) throws IOException {
 1113
get(modifyColumnFamilyAsync(tableName, columnFamily), syncWaitTimeout, 
TimeUnit.MILLISECONDS);
 1114  }
 1115
 1116  @Override
 1117  public Future 
modifyColumnFamilyAsync(final TableName tableName,
-1118  final HColumnDescriptor 
columnFamily) throws IOException {
+1118  final ColumnFamilyDescriptor 
columnFamily) throws IOException {
 1119ModifyColumnResponse response =
 1120executeCallable(new 
MasterCallable(getConnection(),
 1121getRpcControllerFactory()) 
{

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
index 4bd98f4..046cb95 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
@@ -212,7 +212,7 @@
 204import 
org.apache.hadoop.util.StringUtils;
 205import 
org.apache.zookeeper.KeeperException;
 206
-207import 
com.google.common.annotations.VisibleForTesting;
+207import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 208import com.google.protobuf.Descriptors;
 209import com.google.protobuf.Message;
 210import 
com.google.protobuf.RpcController;
@@ -1008,7 +1008,7 @@
 1000  /**
 1001   * {@inheritDoc}
 1002   * @deprecated Since 2.0. Will be 
removed in 3.0. Use
-1003   * {@link 
#addColumnFamily(TableName, HColumnDescriptor)} instead.
+1003   * {@link 
#addColumnFamily(TableName, ColumnFamilyDescriptor)} instead.
 1004   */
 1005  @Override
 1006  @Deprecated
@@ -1018,14 +1018,14 @@
 1010  }
 1011
 1012  @Override
-1013  public void addColumnFamily(final 
TableName tableName, final HColumnDescriptor columnFamily)
+1013  public void addColumnFamily(final 
TableName tableName, final ColumnFamilyDescriptor columnFamily)
 1014  throws IOException {
 1015get(addColumnFam

[11/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncProcess.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncProcess.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncProcess.html
index c38e112..ab61235 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncProcess.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncProcess.html
@@ -28,7 +28,7 @@
 020package org.apache.hadoop.hbase.client;
 021
 022
-023import 
com.google.common.annotations.VisibleForTesting;
+023import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 024
 025import java.io.IOException;
 026import java.io.InterruptedIOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
index 48ea40e..5dcc5c2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
@@ -27,7 +27,7 @@
 019
 020package org.apache.hadoop.hbase.client;
 021
-022import 
com.google.common.annotations.VisibleForTesting;
+022import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 023
 024import java.io.IOException;
 025import java.io.InterruptedIOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
index 48ea40e..5dcc5c2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
@@ -27,7 +27,7 @@
 019
 020package org.apache.hadoop.hbase.client;
 021
-022import 
com.google.common.annotations.VisibleForTesting;
+022import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 023
 024import java.io.IOException;
 025import java.io.InterruptedIOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
index 48ea40e..5dcc5c2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
@@ -27,7 +27,7 @@
 019
 020package org.apache.hadoop.hbase.client;
 021
-022import 
com.google.common.annotations.VisibleForTesting;
+022import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 023
 024import java.io.IOException;
 025import java.io.InterruptedIOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
index 48ea40e..5dcc5c2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
@@ -27,7 +27,7 @@
 019
 020package org.apache.hadoop.hbase.client;
 021
-022import 
com.google.common.annotations.VisibleForTesting;
+022import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 023
 024import java.io.IOException;
 025import java.io.InterruptedIOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
---

[27/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index 073a715..7c48389 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -474,7 +474,7 @@ implements clearCompactedfiles(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List filesToRemove) 
 
 
-com.google.common.collect.ImmutableCollection
+org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection
 close()
 Close all the readers We don't need to worry about 
subsequent requests because the Region
  holds a write lock that will prevent any more reads or writes.
@@ -2413,8 +2413,8 @@ public static org.apache.hadoop.fs.Path 
 
 close
-public com.google.common.collect.ImmutableCollection close()
-   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
+public org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection close()
+   
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: Store
 Close all the readers We don't need to worry about 
subsequent requests because the Region
  holds a write lock that will prevent any more reads or writes.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index 8b0bc7e..c99c156 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -207,7 +207,7 @@ implements clearCompactionQueues 
 
 
-private com.google.common.cache.CacheString,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>
+private 
org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheString,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>
 closedScanners 
 
 
@@ -1093,7 +1093,7 @@ implements 
 
 closedScanners
-private final com.google.common.cache.CacheString,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String> closedScanners
+private 
final org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheString,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String> closedScanners
 
 
 
@@ -1170,7 +1170,7 @@ implements 
 SCANNER_ALREADY_CLOSED
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
-private static final http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException SCANNER_ALREADY_CLOSED
+private static final http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException SCANNER_ALREADY_CLOSED
 Deprecated. 
 
 
@@ -1647,7 +1647,7 @@ private static final http://docs.oracle.com/javase/8/docs/api/java
 
 
 getRegion
-public Region getRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier)
+public Region getRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Find the HRegion based on a region

[48/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apidocs/src-html/org/apache/hadoop/hbase/TableName.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/TableName.html 
b/apidocs/src-html/org/apache/hadoop/hbase/TableName.html
index b54153b..655084f 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/TableName.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/TableName.html
@@ -149,415 +149,417 @@
 141  throw new 
IllegalArgumentException("Name is null or empty");
 142}
 143
-144int namespaceDelimIndex = 
com.google.common.primitives.Bytes.lastIndexOf(tableName,
-145(byte) NAMESPACE_DELIM);
-146if (namespaceDelimIndex < 0){
-147  
isLegalTableQualifierName(tableName);
-148} else {
-149  isLegalNamespaceName(tableName, 0, 
namespaceDelimIndex);
-150  
isLegalTableQualifierName(tableName, namespaceDelimIndex + 1, 
tableName.length);
-151}
-152return tableName;
-153  }
-154
-155  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName) {
-156
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, false);
-157return qualifierName;
-158  }
-159
-160  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName, boolean isSnapshot) {
-161
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, 
isSnapshot);
-162return qualifierName;
-163  }
-164
+144int namespaceDelimIndex =
+145  
org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(tableName,
+146(byte) NAMESPACE_DELIM);
+147if (namespaceDelimIndex < 0){
+148  
isLegalTableQualifierName(tableName);
+149} else {
+150  isLegalNamespaceName(tableName, 0, 
namespaceDelimIndex);
+151  
isLegalTableQualifierName(tableName, namespaceDelimIndex + 1, 
tableName.length);
+152}
+153return tableName;
+154  }
+155
+156  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName) {
+157
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, false);
+158return qualifierName;
+159  }
+160
+161  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName, boolean isSnapshot) {
+162
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, 
isSnapshot);
+163return qualifierName;
+164  }
 165
-166  /**
-167   * Qualifier names can only contain 
'word' characters
-168   * 
[\p{IsAlphabetic}\p{Digit}] or '_', '.' or '-'.
-169   * The name may not start with '.' or 
'-'.
-170   *
-171   * @param qualifierName byte array 
containing the qualifier name
-172   * @param start start index
-173   * @param end end index (exclusive)
-174   */
-175  public static void 
isLegalTableQualifierName(final byte[] qualifierName,
-176  
  int start,
-177  
  int end) {
-178  
isLegalTableQualifierName(qualifierName, start, end, false);
-179  }
-180
-181  public static void 
isLegalTableQualifierName(final byte[] qualifierName,
-182  
  int start,
-183  
  int end,
-184  
  boolean isSnapshot) {
-185if(end - start < 1) {
-186  throw new 
IllegalArgumentException(isSnapshot ? "Snapshot" : "Table" + " qualifier must 
not be empty");
-187}
-188if (qualifierName[start] == '.' || 
qualifierName[start] == '-') {
-189  throw new 
IllegalArgumentException("Illegal first character <" + qualifierName[start] 
+
-190 
"> at 0. " + (isSnapshot ? "Snapshot" : "User-space table") +
-191 
" qualifiers can only start with 'alphanumeric " +
-192 
"characters' from any language: " +
-193 
Bytes.toString(qualifierName, start, end));
-194}
-195// Treat the bytes as UTF-8
-196String qualifierString = new 
String(
-197qualifierName, start, (end - 
start), StandardCharsets.UTF_8);
-198if 
(qualifierString.equals(DISALLOWED_TABLE_NAME)) {
-199  // Per 
https://zookeeper.apache.org/doc/r3.4.10/zookeeperProgrammers.html#ch_zkDataModel
-200  // A znode named "zookeeper" is 
disallowed by zookeeper.
-201  throw new 
IllegalArgumentException("Tables may not be named '" + DISALLOWED_TABLE_NAME + 
"'");
-202}
-203for (int i = 0; i < 
qualifierString.length(); i++) {
-204  // Treat the string as a char-array 
as some characters may be multi-byte
-205  char c = 
qualifierString.charAt(i);
-206  // Check for letter, digit, 
underscore, hyphen, or period, and allowed by ZK.
-207  // ZooKeeper also has limitations, 
but Character.isAlphabetic omits those all
-208  //   See 
https://zookeeper.apache.org/doc/r3.4.10/zoo

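A quick illustration of the naming rules the validation code above enforces, assuming the usual Bytes.toBytes helper; the behaviour is inferred from the quoted source, not from running it:

  TableName.isLegalTableQualifierName(Bytes.toBytes("my_table-1"));  // ok: word chars plus '_', '.', '-'
  TableName.isLegalTableQualifierName(Bytes.toBytes(".hidden"));     // IllegalArgumentException: may not start with '.' or '-'
  TableName.isLegalTableQualifierName(Bytes.toBytes("zookeeper"));   // IllegalArgumentException: reserved znode name
  TableName tn = TableName.valueOf("ns:my_table-1");                 // namespace split on the ':' delimiter
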
[49/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index b175fd3..4fed5d0 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -510,7 +510,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
  This will be removed in HBase 3.0.0.
- Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
+ Use Admin.addColumnFamily(TableName,
 ColumnFamilyDescriptor).
 
 
 
@@ -523,15 +523,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-Admin.addColumnFamily(TableName tableName,
-   HColumnDescriptor columnFamily)
+Admin.addColumnFamily(TableName tableName,
+   ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-Admin.addColumnFamilyAsync(TableName tableName,
-HColumnDescriptor columnFamily)
+Admin.addColumnFamilyAsync(TableName tableName,
+ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
@@ -966,7 +966,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
  This will be removed in HBase 3.0.0.
- Use Admin.modifyColumnFamily(TableName,
 HColumnDescriptor).
+ Use Admin.modifyColumnFamily(TableName,
 ColumnFamilyDescriptor).
 
 
 
@@ -979,15 +979,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-Admin.modifyColumnFamily(TableName tableName,
-  HColumnDescriptor columnFamily)
+Admin.modifyColumnFamily(TableName tableName,
+  ColumnFamilyDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-Admin.modifyColumnFamilyAsync(TableName tableName,
-   HColumnDescriptor columnFamily)
+Admin.modifyColumnFamilyAsync(TableName tableName,
+   ColumnFamilyDescriptor columnFamily)
 Modify an existing column family on a table.
 
 

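The deprecation notes above all point the same way: HColumnDescriptor arguments give way to ColumnFamilyDescriptor. A hedged migration sketch; ColumnFamilyDescriptorBuilder is assumed to be the builder that produces the new descriptor type and is not itself part of this page:

  // deprecated since 2.0.0 (HBASE-1989), to be removed in 3.0.0
  admin.addColumnFamily(tableName, new HColumnDescriptor("cf"));

  // replacement signature shown above
  admin.addColumnFamily(tableName,
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).build());
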
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/org/apache/hadoop/hbase/client/Admin.html
index 98da31f..5b6e63e 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -167,21 +167,21 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
  This will be removed in HBase 3.0.0.
- Use addColumnFamily(TableName,
 HColumnDescriptor).
+ Use addColumnFamily(TableName,
 ColumnFamilyDescriptor).
 
 
 
 
 void
-addColumnFamily(TableName tableName,
-   HColumnDescriptor columnFamily)
+addColumnFamily(TableName tableName,
+   ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-addColumnFamilyAsync(TableName tableName,
-HColumnDescriptor columnFamily)
+addColumnFamilyAsync(TableName tableName,
+ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
@@ -1203,21 +1203,21 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
  This will be removed in HBase 3.0.0.
- Use modifyColumnFamily(TableName,
 HColumnDescriptor).
+  

hbase-site git commit: INFRA-10751 Empty commit

2017-07-21 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site ca5b02753 -> b86866c7b


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/b86866c7
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/b86866c7
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/b86866c7

Branch: refs/heads/asf-site
Commit: b86866c7b81144ef1503668f72a6b7a009e4c519
Parents: ca5b027
Author: jenkins 
Authored: Fri Jul 21 22:08:05 2017 +
Committer: jenkins 
Committed: Fri Jul 21 22:08:05 2017 +

--

--




[01/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site be6740763 -> ca5b02753


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
index 42b6f60..37a733a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
@@ -28,10 +28,10 @@
 020import static 
io.netty.handler.timeout.IdleState.READER_IDLE;
 021import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
 022
-023import com.google.common.base.Charsets;
-024import 
com.google.common.base.Throwables;
-025import 
com.google.common.collect.ImmutableSet;
-026import com.google.common.collect.Maps;
+023import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets;
+024import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
+025import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
+026import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 027import 
com.google.protobuf.CodedOutputStream;
 028
 029import io.netty.buffer.ByteBuf;
@@ -93,7 +93,7 @@
 085import 
org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
 086import 
org.apache.hadoop.fs.FileEncryptionInfo;
 087import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-088import 
org.apache.hadoop.hbase.util.ByteStringer;
+088import com.google.protobuf.ByteString;
 089import 
org.apache.hadoop.hdfs.DFSClient;
 090import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 091import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -373,416 +373,418 @@
 365  
DataTransferEncryptorMessageProto.newBuilder();
 366  
builder.setStatus(DataTransferEncryptorStatus.SUCCESS);
 367  if (payload != null) {
-368
builder.setPayload(ByteStringer.wrap(payload));
-369  }
-370  if (options != null) {
-371
builder.addAllCipherOption(PB_HELPER.convertCipherOptions(options));
-372  }
-373  DataTransferEncryptorMessageProto 
proto = builder.build();
-374  int size = 
proto.getSerializedSize();
-375  size += 
CodedOutputStream.computeRawVarint32Size(size);
-376  ByteBuf buf = 
ctx.alloc().buffer(size);
-377  proto.writeDelimitedTo(new 
ByteBufOutputStream(buf));
-378  ctx.write(buf);
-379}
-380
-381@Override
-382public void 
handlerAdded(ChannelHandlerContext ctx) throws Exception {
-383  
ctx.write(ctx.alloc().buffer(4).writeInt(SASL_TRANSFER_MAGIC_NUMBER));
-384  sendSaslMessage(ctx, new 
byte[0]);
-385  ctx.flush();
-386  step++;
-387}
-388
-389@Override
-390public void 
channelInactive(ChannelHandlerContext ctx) throws Exception {
-391  saslClient.dispose();
-392}
-393
-394private void 
check(DataTransferEncryptorMessageProto proto) throws IOException {
-395  if (proto.getStatus() == 
DataTransferEncryptorStatus.ERROR_UNKNOWN_KEY) {
-396throw new 
InvalidEncryptionKeyException(proto.getMessage());
-397  } else if (proto.getStatus() == 
DataTransferEncryptorStatus.ERROR) {
-398throw new 
IOException(proto.getMessage());
-399  }
-400}
-401
-402private String getNegotiatedQop() {
-403  return (String) 
saslClient.getNegotiatedProperty(Sasl.QOP);
-404}
-405
-406private boolean 
isNegotiatedQopPrivacy() {
-407  String qop = getNegotiatedQop();
-408  return qop != null && 
"auth-conf".equalsIgnoreCase(qop);
-409}
-410
-411private boolean 
requestedQopContainsPrivacy() {
-412  Set requestedQop =
-413  
ImmutableSet.copyOf(Arrays.asList(saslProps.get(Sasl.QOP).split(",")));
-414  return 
requestedQop.contains("auth-conf");
-415}
-416
-417private void checkSaslComplete() 
throws IOException {
-418  if (!saslClient.isComplete()) {
-419throw new IOException("Failed to 
complete SASL handshake");
-420  }
-421  Set requestedQop =
-422  
ImmutableSet.copyOf(Arrays.asList(saslProps.get(Sasl.QOP).split(",")));
-423  String negotiatedQop = 
getNegotiatedQop();
-424  LOG.debug(
-425"Verifying QOP, requested QOP = " 
+ requestedQop + ", negotiated QOP = " + negotiatedQop);
-426  if 
(!requestedQop.contains(negotiatedQop)) {
-427throw new 
IOException(String.format("SASL handshake completed, but "
-428+ "channel does not have 
acceptable quality of protection, "
-429+ "requested = %s, negotiated 
= %s",

[36/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.html
index 17b8a4b..cee2f9b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.html
@@ -255,7 +255,7 @@ implements RegionObserver
-postAppend,
 postBatchMutate,
 postBatchMutateIndispensably,
 postBulkLoadHFile,
 postBulkLoadHFile,
 postCheckAndDelete,
 postCheckAndPut,
 postClose, postCloseRegionOperation,
 postCommitStoreFile,
 postCompact,
 postCompact,
 postCompactSelection,
 postCompactSelection,
 postCompleteSplit,
 postDelete,
 postExists,
 postFlush,
 postFlush,
 postIncrement, postIncrementColumnValue,
 postInstantiateDeleteTracker,
 postLogReplay,
 postMutationBeforeWAL,
 postOpen,
 postPut,
 postReplayWALs,
 postRollBackSplit,
 postScannerClose,
 postScannerFilterRow,
 postScannerFilterRow,
 postScannerNext,
 postScannerOpen,
 postSplit,
 postStartRegionOperation,
 postStoreFileReaderOpen,
 postWALRestore,
 preAppend,
 preAppendAfterRowLock,
 preBatchMutate,
 preBulkLoadHFile,
 preCheckAndDelete,
 preCheckAndDeleteAfterRowLock, preCheckAndPut,
 preCheckAndPutAfterRowLock,
 preClose,
 preCommitStoreFile,
 preCompact,
 preCompact,
 preCompactScannerOpen,
 preCompactScannerOpen,
 preCompactScannerOpen,
 preCompactSelection,
 preCompactSelection,
 preDelete,
 preExists, preFlush,
 preFlush,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preIncrement,
 preIncrementAfterRowLock,
 preIncrementColumnValue, preOpen,
 prePrepareTimeStampForDeleteVersion,
 prePut,
 preReplayWALs, preRollBackSplit,
 preScannerClose,
 preScannerNext,
 preScannerOpen,
 preSplit,
 preSplit,
 preSplitAfterPONR,
 preSplitBeforePONR,
 preStoreFileReaderOpen,
 preStoreScannerOpen,
 preStoreScannerOpen,
 preWALRestore
+postAppend,
 postBatchMutate,
 postBatchMutateIndispensably,
 postBulkLoadHFile,
 postBulkLoadHFile,
 postCheckAndDelete,
 postCheckAndPut,
 postClose, postCloseRegionOperation,
 postCommitStoreFile,
 postCompact,
 postCompact,
 postCompactSelection,
 postCompactSelection,
 postCompleteSplit,
 postDelete,
 postExists,
 postFlush,
 postFlush,
 postIncrement,
 postIncrementColumnValue,
 postInstantiateDeleteTracker,
 postLogReplay,
 postMutationBeforeWAL,
 postOpen,
 postPut,
 postReplayWALs,
 postRollBackSplit, href="../../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.html#postScannerClose-org.apache.hadoop.hbase.coprocessor.ObserverContext-org.apache.hadoop.hbase.regionserver.InternalScanner-">postScannerClose,
 > href="../../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.html#postScannerFilterRow-org.apache.hadoop.hbase.coprocessor.ObserverContext-org.apache.hadoop.hbase.regionserver.InternalScanner-byte:A-int-short-boolean-">postScannerFilterRow,
 > href="../../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.html#postScannerFilterRow-org.apache.hadoop.hbase.coprocessor.ObserverContext-org.apache.hadoop.hbase.regionserver.InternalScanner-org.apache.hadoop.hbase.Cell-boolean-">postScannerFilterRow,
 > href="../../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.html#postScannerNext-org.apache.hadoop.hbase.coprocessor.ObserverContext-org.apache.hadoop.hbase.regionserver.InternalScanner-java.util.List-int-boolea
 n-">postScannerNext, postScannerOpen,
 postSplit,
 postStartRegionOperation,
 postStoreFileReaderOpen,
 postWALRestore,
 preAppend,
 preAppendAfterRowLock,
 preBatchMutate,
 preBulkLoadHFile,
 preCheckAndDelete,
 preCheckAndDeleteAfterRowLock,
 preCheckAndPut,
 preCheckAndPutAfterRowLock,
 preClose,
 preCommitStoreFile,
 preCompact,
 preCompact,
 preCompactScannerOpen,
 preCompactScannerOpen,
 preCompactScannerOpen, preCompactSelection,
 preCompactSelection,
 preDelete,
 preExists,
 preFlush,
 preFlush,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preIncrement,
 preIncrementAfterRowLock,
 preIncrementColumnValue,
 preOpen,
 prePrepareTimeStampForDeleteVersion,
 prePut,
 preReplayWALs,
 preRollBackSplit,
 preScannerClose,
 preScannerNext,
 preScannerOpen, preSplit,
 preSplit,
 preSplitAfterPONR,
 preSplitBeforePONR,
 preStoreFileReaderOpen,
 preStoreScannerOpen,
 preStoreScannerOpen,
 preWALRestore
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.html
 
b/devapidocs/org

[30/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
index 144336c..e7b3b80 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
@@ -326,7 +326,7 @@
 
 
 private void
-SimpleLoadBalancer.addRegionPlan(com.google.common.collect.MinMaxPriorityQueue regionsToMove,
+SimpleLoadBalancer.addRegionPlan(org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue regionsToMove,
  boolean fetchFromTail,
  ServerName sn,
  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionsToReturn)
@@ -335,7 +335,7 @@
 
 
 private void
-SimpleLoadBalancer.addRegionPlan(com.google.common.collect.MinMaxPriorityQueue regionsToMove,
+SimpleLoadBalancer.addRegionPlan(org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue regionsToMove,
  boolean fetchFromTail,
  ServerName sn,
  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionsToReturn)
@@ -344,10 +344,10 @@
 
 
 void
-SimpleLoadBalancer.balanceOverall(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionsToReturn,
+SimpleLoadBalancer.balanceOverall(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionsToReturn,
   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map serverBalanceInfo,
   boolean fetchFromTail,
-  com.google.common.collect.MinMaxPriorityQueue regionsToMove,
+  
org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue regionsToMove,
   int max,
   int min)
 If we need to balanceoverall, we need to add one more round 
to peel off one region from each max.
@@ -355,10 +355,10 @@
 
 
 void
-SimpleLoadBalancer.balanceOverall(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionsToReturn,
+SimpleLoadBalancer.balanceOverall(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionsToReturn,
   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map serverBalanceInfo,
   boolean fetchFromTail,
-  com.google.common.collect.MinMaxPriorityQueue regionsToMove,
+  
org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue regionsToMove,
   int max,
   int min)
 If we need to balanceoverall, we need to add one more round 
to peel off one region from each max.

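A small sketch of how the fetchFromTail flag in addRegionPlan/balanceOverall is typically consumed, assuming a load-ordered queue; loadComparator and the surrounding bookkeeping are placeholders, not the balancer's actual code:

  import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue;

  MinMaxPriorityQueue<RegionPlan> regionsToMove =
      MinMaxPriorityQueue.orderedBy(loadComparator).create();
  // peel from the heavy end when fetchFromTail is set, otherwise from the light end
  RegionPlan next = fetchFromTail ? regionsToMove.pollLast() : regionsToMove.pollFirst();
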
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/master/locking/LockProcedure.LockType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/locking/LockProcedure.LockType.html 
b/devapidocs/org/apache/hadoop/hbase/master/locking/LockProcedure.LockType.html
index 6b6fdc0..0cc0763 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/locking/LockProcedure.LockType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/locking/LockProcedure.LockType.html
@@ -236,7 +236,7 @@ the order they are declared.
 
 
 values
-public static LockProcedure.LockType[] values()
+public static LockProcedure.LockType[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -256,7 +256,7 @@ for (LockProcedure.LockType c : 
LockProcedure.LockType.values())
 
 
 valueOf
-public static LockProcedure.LockType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static LockProcedure.LockType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type wi

[23/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
index 8c8883f..86d5438 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class ReplicationSource.LogsComparator
+public static class ReplicationSource.LogsComparator
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in 
java.util">Comparator
 Comparator used to compare logs together based on their 
start time
@@ -203,7 +203,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 LogsComparator
-public LogsComparator()
+public LogsComparator()
 
 
 
@@ -220,7 +220,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 compare
-public int compare(org.apache.hadoop.fs.Path o1,
+public int compare(org.apache.hadoop.fs.Path o1,
org.apache.hadoop.fs.Path o2)
 
 Specified by:
@@ -234,7 +234,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 getTS
-private static long getTS(org.apache.hadoop.fs.Path p)
+private static long getTS(org.apache.hadoop.fs.Path p)
 Split a path to get the start time
  For example: 10.20.20.171%3A60020.1277499063250
 

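The LogsComparator contract above (order WALs by the start timestamp that trails the last '.' of the file name, e.g. 10.20.20.171%3A60020.1277499063250) can be restated in a few lines; this is a sketch of the idea, not the class's actual source:

  import java.io.Serializable;
  import java.util.Comparator;
  import org.apache.hadoop.fs.Path;

  public class WalStartTimeComparator implements Comparator<Path>, Serializable {
    @Override
    public int compare(Path o1, Path o2) {
      return Long.compare(getTS(o1), getTS(o2));
    }
    // everything after the last '.' of the WAL name is its start time
    private static long getTS(Path p) {
      String name = p.getName();
      return Long.parseLong(name.substring(name.lastIndexOf('.') + 1));
    }
  }
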
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.html
index a5a0b3e..663f736 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class ReplicationSource
+public class ReplicationSource
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread
 implements ReplicationSourceInterface
 Class that handles the source of a replication stream.
@@ -524,7 +524,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -533,7 +533,7 @@ implements 
 
 queues
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/PriorityBlockingQueue.html?is-external=true";
 title="class or interface in 
java.util.concurrent">PriorityBlockingQueue>
 queues
+private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/PriorityBlockingQueue.html?is-external=true";
 title="class or interface in 
java.util.concurrent">PriorityBlockingQueue>
 queues
 
 
 
@@ -542,7 +542,7 @@ implements 
 
 queueSizePerGroup
-protected int queueSizePerGroup
+protected int queueSizePerGroup
 
 
 
@@ -551,7 +551,7 @@ implements 
 
 replicationQueues
-protected ReplicationQueues 
replicationQueues
+protected ReplicationQueues 
replicationQueues
 
 
 
@@ -560,7 +560,7 @@ implements 
 
 replicationPeers
-private ReplicationPeers 
replicationPeers
+private ReplicationPeers 
replicationPeers
 
 
 
@@ -569,7 +569,7 @@ implements 
 
 conf
-protected org.apache.hadoop.conf.Configuration conf
+protected org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -578,7 +578,7 @@ implements 
 
 replicationQueueInfo
-protected ReplicationQueueInfo 
replicationQueueInfo
+protected ReplicationQueueInfo 
replicationQueueInfo
 
 
 
@@ -587,7 +587,7 @@ implements 
 
 peerId
-private http://docs.oracle.com/javase/8/docs/a

[19/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
index 150ecf6..0d719a5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Configuration")
-public class VisibilityController
+public class VisibilityController
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements MasterObserver, 
RegionObserver, 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService.Interface,
 CoprocessorService
 Coprocessor that has both the MasterObserver and 
RegionObserver implemented that supports in
@@ -579,7 +579,7 @@ implements RegionObserver
-postAppend,
 postBatchMutate,
 postBatchMutateIndispensably,
 postBulkLoadHFile,
 postBulkLoadHFile,
 postCheckAndDelete,
 postCheckAndPut,
 postClose, postCloseRegionOperation,
 postCommitStoreFile,
 postCompact,
 postCompact,
 postCompactSelection,
 postCompactSelection,
 postCompleteSplit,
 postDelete,
 postExists,
 postFlush,
 postFlush,
 postGetOp, postIncrement,
 postIncrementColumnValue,
 postPut,
 postReplayWALs, postRollBackSplit,
 postScannerFilterRow,
 postScannerNext,
 postSplit,
 postStartRegionOperation,
 postStoreFileReaderOpen,
 postWALRestore,
 preAppendAfterRowLock,
 preBulkLoadHFile,
 preCheckAndDelete,
 preCheckAndDeleteAfterRowLock,
 preCheckAndPut,
 preCheckAndPutAfterRowLock,
 preClose,
 preCommitStoreFile,
 preCompact,
 preCompact,
 preCompactScannerOpen,
 preCompactScannerOpen,
 preCompactScannerOpen, preCompactSelection,
 preCompactSelection,
 preDelete,
 preExists,
 preFlush,
 preFlush,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preIncrementAfterRowLock,
 preIncrementColumnValue,
 preOpen, prePut,
 preReplayWALs,
 preRollBackSplit,
 preSplit,
 preSplit,
 preSplitAfterPONR,
 preSplitBeforePONR,
 preStoreFileReaderOpen,
 preStoreScannerOpen,
 preStoreScannerOpen,
 preWALRestore
+postAppend,
 postBatchMutate,
 postBatchMutateIndispensably,
 postBulkLoadHFile,
 postBulkLoadHFile,
 postCheckAndDelete,
 postCheckAndPut,
 postClose, postCloseRegionOperation,
 postCommitStoreFile,
 postCompact,
 postCompact,
 postCompactSelection,
 postCompactSelection,
 postCompleteSplit,
 postDelete,
 postExists,
 postFlush,
 postFlush,
 postGetOp,
 postIncrement,
 postIncrementColumnValue,
 postPut,
 postReplayWALs,
 postRollBackSplit,
 postScannerFilterRow,
 postScannerNext,
 postSplit, postStartRegionOperation,
 postStoreFileReaderOpen,
 
 postWALRestore, preAppendAfterRowLock,
 preBulkLoadHFile,
 preCheckAndDelete,
 preCheckAndDeleteAfterRowLock,
 preCheckAndPut,
 preCheckAndPutAfterRowLock,
 preClose, preCommitStoreFile,
 preCompact,
 preCompact,
 preCompactScannerOpen,
 preCompactScannerOpen,
 preCompactScannerOpen,
 preCompactSelection,
 preCompactSelection,
 preDelete,
 preExists,
 preFlush,
 preFlush,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preFlushScannerOpen,
 preIncrementAfterRowLock,
 preIncrementColumnValue,
 preOpen,
 prePut,
 preReplayWALs,
 preRollBackSplit,
 preSplit,
 preSplit,
 preSplitAfterPONR,
 preSplitBeforePONR,
 preStoreFileReaderOpen,
 preStoreScannerOpen,
 preStoreScannerOpen,
 preWALRestore
 
 
 
@@ -601,7 +601,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -610,7 +610,7 @@ implements 
 
 AUDITLOG
-private static final org.apache.commons.logging.Log AUDITLOG
+private static final org.apache.commons.logging.Log AUDITLOG
 
 
 
@@ -619,7 +619,7 @@ implements 
 
 labelsRegion
-private boolean labelsRegion
+private boolean labelsRegion
 
 
 
@@ -628,7 +628,7 @@ implements 
 
 accessControllerAvailable
-private boolean accessControllerAvailable
+private boolean accessControllerAvailable
 
 
 
@@ -637,7 +637,7 @@ implements 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -646,7 +646,7 @@ implements 
 
 initialized
-private volatile boolean initialized
+private volatile boolean initialized
 
 
 
@@ -655,7 +655,7 @@ implements 
 
 checkAuths
-private boolean checkAuths
+private boolean checkAuths
 
 
 
@@ -664,7 +664,7 @@ implements 
 
 scannerOwners
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map

[44/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index e21c9bd..b3f9572 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependency Management
 
@@ -293,299 +293,299 @@
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
 
-com.google.guava
-http://code.google.com/p/guava-libraries/guava";>guava
-12.0.1
-jar
-http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
 com.google.protobuf
 http://code.google.com/p/protobuf";>protobuf-java
 2.5.0
 jar
 http://www.opensource.org/licenses/bsd-license.php";>New BSD 
license
-
+
 com.lmax
 http://lmax-exchange.github.com/disruptor";>disruptor
 3.3.6
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 commons-cli
 http://commons.apache.org/proper/commons-cli/";>commons-cli
 1.4
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
-
+
 commons-codec
 http://commons.apache.org/proper/commons-codec/";>commons-codec
 1.9
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 commons-collections
 http://commons.apache.org/collections/";>commons-collections
 3.2.2
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
-
+
 commons-io
 http://commons.apache.org/proper/commons-io/";>commons-io
 2.5
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
-
+
 commons-lang
 http://commons.apache.org/lang/";>commons-lang
 2.6
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 commons-logging
 http://commons.apache.org/proper/commons-logging/";>commons-logging
 1.2
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 io.dropwizard.metrics
 http://metrics.codahale.com/metrics-core/";>metrics-core
 3.2.1
 jar
 http://www.apache.org/licenses/LICENSE-2.0.html";>Apache License 
2.0
-
+
 io.netty
 http://netty.io/netty-all/";>netty-all
 4.1.9.Final
 jar
 http://www.apache.org/licenses/LICENSE-2.0";>Apache License, Version 
2.0
-
+
 javax.servlet
 http://servlet-spec.java.net";>javax.servlet-api
 3.1.0
 jar
 https://glassfish.dev.java.net/nonav/public/CDDL+GPL.html";>CDDL + GPLv2 
with classpath exception
-
+
 javax.ws.rs
 http://jax-rs-spec.java.net";>javax.ws.rs-api
 2.0.1
 jar
 http://glassfish.java.net/public/CDDL+GPL_1_1.html";>CDDL 1.1, http://glassfish.java.net/public/CDDL+GPL_1_1.html";>GPL2 w/ 
CPE
-
+
 javax.xml.bind
 http://jaxb.java.net/";>jaxb-api
 2.2.12
 jar
 https://glassfish.java.net/public/CDDL+GPL_1_1.html";>CDDL 1.1, https://glassfish.java.net/public/CDDL+GPL_1_1.html";>GPL2 w/ 
CPE
-
+
 junit
 http://junit.org";>junit
 4.12
 jar
 http://www.eclipse.org/legal/epl-v10.html";>Eclipse Public License 
1.0
-
+
 log4j
 http://logging.apache.org/log4j/1.2/";>log4j
 1.2.17
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 net.spy
 http://www.couchbase.org/code/couchbase/java";>spymemcached
 2.12.2
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 org.apache.avro
 http://avro.apache.org";>avro
 1.7.7
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 org.apache.commons
 http://commons.apache.org/proper/commons-crypto/";>commons-crypto
 1.0.0
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
-
+
 org.apache.commons
 http://commons.apache.org/math/";>commons-math
 2.2
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 org.apache.curator
 http://curator.apache.org/curator-client";>curator-client
 2.12.0
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 org.apache.curator
 http://curator.apache.org/curator-framework";>curator-framework
 2.12.0
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 org.apache.curator
 http://curator.apache.org/curator-recipes";>curator-recipes
 2.12.0
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software 
License, Version 2.0
-
+
 org.apache.hadoop
 hadoop-auth
 2.7.1
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
-
+
 org.apache.hadoop
 hadoop-client
 2.7.1
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
-
+
 org.apache.hadoop
 hadoop-common
 2.7.1
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 
2.0
-
+
 org.apache.hadoop
 hadoop-hdfs
 2.7.1
 jar
 http://www

[51/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/ca5b0275
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/ca5b0275
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/ca5b0275

Branch: refs/heads/asf-site
Commit: ca5b027539e293982334e4fc33c3bcf09319c512
Parents: be67407
Author: jenkins 
Authored: Fri Jul 21 22:07:13 2017 +
Committer: jenkins 
Committed: Fri Jul 21 22:07:13 2017 +

--
 acid-semantics.html | 8 +-
 apache_hbase_reference_guide.pdf|  2066 +-
 apidocs/deprecated-list.html| 4 +-
 apidocs/index-all.html  |16 +-
 apidocs/org/apache/hadoop/hbase/TableName.html  |48 +-
 .../hbase/class-use/HColumnDescriptor.html  |32 +-
 .../hadoop/hbase/class-use/TableName.html   |20 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |40 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../hadoop/hbase/client/IsolationLevel.html | 4 +-
 .../hadoop/hbase/client/Scan.ReadType.html  | 4 +-
 .../class-use/ColumnFamilyDescriptor.html   |28 +
 .../hbase/client/class-use/Connection.html  | 4 +-
 .../hadoop/hbase/client/class-use/Table.html| 6 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  |14 +-
 .../hadoop/hbase/util/class-use/Pair.html   | 4 +-
 .../org/apache/hadoop/hbase/CellUtil.html   | 8 +-
 .../org/apache/hadoop/hbase/ChoreService.html   | 2 +-
 .../org/apache/hadoop/hbase/ScheduledChore.html | 2 +-
 .../org/apache/hadoop/hbase/ServerName.html | 2 +-
 .../org/apache/hadoop/hbase/TableName.html  |   806 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |12 +-
 .../hadoop/hbase/client/AsyncTableBase.html | 2 +-
 .../client/ColumnFamilyDescriptorBuilder.html   | 2 +-
 ...ableMultiplexer.HTableMultiplexerStatus.html | 4 +-
 .../hadoop/hbase/client/HTableMultiplexer.html  | 4 +-
 .../apache/hadoop/hbase/client/Mutation.html|10 +-
 .../org/apache/hadoop/hbase/client/Query.html   | 6 +-
 .../apache/hadoop/hbase/client/RowAccess.html   | 2 +-
 .../backoff/ExponentialClientBackoffPolicy.html | 2 +-
 .../hadoop/hbase/client/locking/EntityLock.html | 2 +-
 .../client/replication/ReplicationAdmin.html| 4 +-
 .../hbase/filter/ColumnCountGetFilter.html  | 2 +-
 .../hbase/filter/ColumnPaginationFilter.html| 2 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.html | 2 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.html  | 2 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 2 +-
 .../hadoop/hbase/filter/CompareFilter.html  | 2 +-
 .../hbase/filter/DependentColumnFilter.html | 2 +-
 .../hadoop/hbase/filter/FirstKeyOnlyFilter.html | 2 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.html | 2 +-
 .../hbase/filter/InclusiveStopFilter.html   | 2 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.html  | 2 +-
 .../apache/hadoop/hbase/filter/PageFilter.html  | 2 +-
 .../hadoop/hbase/filter/PrefixFilter.html   | 2 +-
 .../hbase/filter/SingleColumnValueFilter.html   | 2 +-
 .../hadoop/hbase/filter/TimestampsFilter.html   | 2 +-
 .../apache/hadoop/hbase/io/crypto/Context.html  | 2 +-
 .../hbase/ipc/NettyRpcClientConfigHelper.html   | 2 +-
 .../hadoop/hbase/mapreduce/CellCounter.html | 2 +-
 .../hbase/mapreduce/HFileOutputFormat2.html | 2 +-
 .../apache/hadoop/hbase/mapreduce/Import.html   | 4 +-
 .../hadoop/hbase/mapreduce/ImportTsv.html   |12 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  | 8 +-
 .../mapreduce/MultiTableHFileOutputFormat.html  |24 +-
 .../MultiTableSnapshotInputFormat.html  | 2 +-
 .../hbase/mapreduce/TableMapReduceUtil.html | 2 +-
 .../hbase/mapreduce/TableRecordReaderImpl.html  | 2 +-
 .../mapreduce/TableSnapshotInputFormat.html | 2 +-
 .../hadoop/hbase/mapreduce/WALPlayer.html   | 2 +-
 .../org/apache/hadoop/hbase/net/Address.html| 4 +-
 .../org/apache/hadoop/hbase/security/User.html  | 2 +-
 .../hadoop/hbase/util/ByteBufferUtils.html  | 2 +-
 .../hadoop/hbase/util/ByteRangeUtils.html   | 2 +-
 .../hbase/util/Bytes.ByteArrayComparator.html   |10 +-
 .../hbase/util/Bytes.RowEndKeyComparator.html   |10 +-
 .../org/apache/hadoop/hbase/util/Bytes.html |10 +-
 .../hadoop/hbase/util/ConfigurationUtil.html| 2 +-
 .../apache/hadoop/hbase/util/OrderedBytes.html  | 2 +-
 .../hbase/zookeeper/MiniZooKeeperCluster.html   | 2 +-
 book.html   | 2 +-
 bulk-loads.html |   

[14/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/TableName.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/TableName.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/TableName.html
index b54153b..655084f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/TableName.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/TableName.html
@@ -149,415 +149,417 @@
 141  throw new 
IllegalArgumentException("Name is null or empty");
 142}
 143
-144int namespaceDelimIndex = 
com.google.common.primitives.Bytes.lastIndexOf(tableName,
-145(byte) NAMESPACE_DELIM);
-146if (namespaceDelimIndex < 0){
-147  
isLegalTableQualifierName(tableName);
-148} else {
-149  isLegalNamespaceName(tableName, 0, 
namespaceDelimIndex);
-150  
isLegalTableQualifierName(tableName, namespaceDelimIndex + 1, 
tableName.length);
-151}
-152return tableName;
-153  }
-154
-155  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName) {
-156
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, false);
-157return qualifierName;
-158  }
-159
-160  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName, boolean isSnapshot) {
-161
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, 
isSnapshot);
-162return qualifierName;
-163  }
-164
+144int namespaceDelimIndex =
+145  
org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(tableName,
+146(byte) NAMESPACE_DELIM);
+147if (namespaceDelimIndex < 0){
+148  
isLegalTableQualifierName(tableName);
+149} else {
+150  isLegalNamespaceName(tableName, 0, 
namespaceDelimIndex);
+151  
isLegalTableQualifierName(tableName, namespaceDelimIndex + 1, 
tableName.length);
+152}
+153return tableName;
+154  }
+155
+156  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName) {
+157
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, false);
+158return qualifierName;
+159  }
+160
+161  public static byte [] 
isLegalTableQualifierName(final byte[] qualifierName, boolean isSnapshot) {
+162
isLegalTableQualifierName(qualifierName, 0, qualifierName.length, 
isSnapshot);
+163return qualifierName;
+164  }
 165
-166  /**
-167   * Qualifier names can only contain 
'word' characters
-168   * 
[\p{IsAlphabetic}\p{Digit}] or '_', '.' or '-'.
-169   * The name may not start with '.' or 
'-'.
-170   *
-171   * @param qualifierName byte array 
containing the qualifier name
-172   * @param start start index
-173   * @param end end index (exclusive)
-174   */
-175  public static void 
isLegalTableQualifierName(final byte[] qualifierName,
-176  
  int start,
-177  
  int end) {
-178  
isLegalTableQualifierName(qualifierName, start, end, false);
-179  }
-180
-181  public static void 
isLegalTableQualifierName(final byte[] qualifierName,
-182  
  int start,
-183  
  int end,
-184  
  boolean isSnapshot) {
-185if(end - start < 1) {
-186  throw new 
IllegalArgumentException(isSnapshot ? "Snapshot" : "Table" + " qualifier must 
not be empty");
-187}
-188if (qualifierName[start] == '.' || 
qualifierName[start] == '-') {
-189  throw new 
IllegalArgumentException("Illegal first character <" + qualifierName[start] 
+
-190 
"> at 0. " + (isSnapshot ? "Snapshot" : "User-space table") +
-191 
" qualifiers can only start with 'alphanumeric " +
-192 
"characters' from any language: " +
-193 
Bytes.toString(qualifierName, start, end));
-194}
-195// Treat the bytes as UTF-8
-196String qualifierString = new 
String(
-197qualifierName, start, (end - 
start), StandardCharsets.UTF_8);
-198if 
(qualifierString.equals(DISALLOWED_TABLE_NAME)) {
-199  // Per 
https://zookeeper.apache.org/doc/r3.4.10/zookeeperProgrammers.html#ch_zkDataModel
-200  // A znode named "zookeeper" is 
disallowed by zookeeper.
-201  throw new 
IllegalArgumentException("Tables may not be named '" + DISALLOWED_TABLE_NAME + 
"'");
-202}
-203for (int i = 0; i < 
qualifierString.length(); i++) {
-204  // Treat the string as a char-array 
as some characters may be multi-byte
-205  char c = 
qualifierString.charAt(i);
-206  // Check for letter, digit, 
underscore, hyphen, or period, and allowed by ZK.
-207  // ZooKeeper also has limitations, 
but Character.isAlphabetic omits those all
-208  //   See 
https://zookeeper.apache.org/
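
For a quick illustration of the qualifier rules enforced above (word characters plus '_', '.'
and '-', and no leading '.' or '-'), a hedged sketch against the public TableName API; the
concrete names are examples only:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

// Accepted: namespace plus a qualifier built from word characters, '_', '.' and '-'.
TableName ok = TableName.valueOf("myNamespace:web_stats-2017.07");

// Rejected: a qualifier may not start with '.' or '-'.
try {
  TableName.isLegalTableQualifierName(Bytes.toBytes(".hidden"));
} catch (IllegalArgumentException expected) {
  // thrown by the check shown in the source above
}
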

[31/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.html
index 030a25f..5c7f656 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class SyncTable
+public class SyncTable
 extends org.apache.hadoop.conf.Configured
 implements org.apache.hadoop.util.Tool
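
SyncTable is normally driven through ToolRunner (or the hbase command line) against the hash
directory produced by HashTable; a hedged sketch, with the paths, table names and option
spelling to be checked against the usage text of the release at hand:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.SyncTable;
import org.apache.hadoop.util.ToolRunner;

Configuration conf = HBaseConfiguration.create();
// Dry run first: report differences without writing to the target table.
int exit = ToolRunner.run(conf, new SyncTable(conf),
    new String[] { "--dryrun=true", "hdfs:///hashes/tableA", "tableA", "tableA_copy" });
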
 
@@ -314,7 +314,7 @@ implements org.apache.hadoop.util.Tool
 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -323,7 +323,7 @@ implements org.apache.hadoop.util.Tool
 
 
 SOURCE_HASH_DIR_CONF_KEY
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SOURCE_HASH_DIR_CONF_KEY
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SOURCE_HASH_DIR_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -336,7 +336,7 @@ implements org.apache.hadoop.util.Tool
 
 
 SOURCE_TABLE_CONF_KEY
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SOURCE_TABLE_CONF_KEY
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SOURCE_TABLE_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -349,7 +349,7 @@ implements org.apache.hadoop.util.Tool
 
 
 TARGET_TABLE_CONF_KEY
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TARGET_TABLE_CONF_KEY
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TARGET_TABLE_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -362,7 +362,7 @@ implements org.apache.hadoop.util.Tool
 
 
 SOURCE_ZK_CLUSTER_CONF_KEY
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SOURCE_ZK_CLUSTER_CONF_KEY
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SOURCE_ZK_CLUSTER_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -375,7 +375,7 @@ implements org.apache.hadoop.util.Tool
 
 
 TARGET_ZK_CLUSTER_CONF_KEY
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TARGET_ZK_CLUSTER_CONF_KEY
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TARGET_ZK_CLUSTER_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -388,7 +388,7 @@ implements org.apache.hadoop.util.Tool
 
 
 DRY_RUN_CONF_KEY
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String DRY_RUN_CONF_KEY
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String DRY_RUN_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -401,7 +401,7 @@ implements org.apache.hadoop.util.Tool
 
 
 sourceHashDir
-org.apache.hadoop.fs.Path sourceHashDir
+org.apache.hadoop.fs.Path sourceHashDir
 
 
 
@@ -410,7 +410,7 @@ implements org.apache.hadoop.util.Tool
 
 
 sourceTableName
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String sourceTableName
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String sourceTableName
 
 
 
@@ -419,7 +419,7 @@ implements org.apache.hadoop.util.Tool
 
 
 targetTableName
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String targetTableName
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String targetTableName
 
 
 
@@ -428,7 +428,7 @@ implements org.apache.hadoop.util.Tool
 
 
 sourceZkCluster
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String sourceZkCluster
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String sourceZkCluster
 
 
 
@@ -437,7 +437,7 @@ implements org.apache.hadoop.util.Tool
 

[38/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Consistency.html 
b/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
index 0741a05..9483c5c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
@@ -253,7 +253,7 @@ the order they are declared.
 
 
 values
-public static Consistency[] values()
+public static Consistency[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -273,7 +273,7 @@ for (Consistency c : Consistency.values())
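
The enum itself is consumed on the read path; a hedged sketch of the documented iteration idiom
and of a TIMELINE read against region replicas (the Table handle, table and row names are
placeholders):

import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

for (Consistency c : Consistency.values()) {
  System.out.println(c);                    // STRONG, TIMELINE
}

Get get = new Get(Bytes.toBytes("row1"));
get.setConsistency(Consistency.TIMELINE);   // allow a possibly-stale replica answer
Result result = table.get(get);
if (result.isStale()) {
  // served by a secondary region replica
}
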
 
 
 valueOf
-public static Consistency valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static Consistency valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are not permitted.)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
index b679df5..704e1fc 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
@@ -356,21 +356,21 @@ implements HColumnDescriptor columnFamily)
 Deprecated. 
 Since 2.0. Will be removed 
in 3.0. Use
- addColumnFamily(TableName,
 HColumnDescriptor) instead.
+ addColumnFamily(TableName,
 ColumnFamilyDescriptor) instead.
 
 
 
 
 void
-addColumnFamily(TableName tableName,
-   HColumnDescriptor columnFamily)
+addColumnFamily(TableName tableName,
+   ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-addColumnFamilyAsync(TableName tableName,
-HColumnDescriptor columnFamily)
+addColumnFamilyAsync(TableName tableName,
+ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
@@ -1478,21 +1478,21 @@ implements HColumnDescriptor columnFamily)
 Deprecated. 
 As of 2.0. Will be removed 
in 3.0. Use
- modifyColumnFamily(TableName,
 HColumnDescriptor) instead.
+ modifyColumnFamily(TableName,
 ColumnFamilyDescriptor) instead.
 
 
 
 
 void
-modifyColumnFamily(TableName tableName,
-  HColumnDescriptor columnFamily)
+modifyColumnFamily(TableName tableName,
+  ColumnFamilyDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-modifyColumnFamilyAsync(TableName tableName,
-   HColumnDescriptor columnFamily)
+modifyColumnFamilyAsync(TableName tableName,
+   ColumnFamilyDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
@@ -3334,7 +3334,7 @@ public void HColumnDescriptor columnFamily)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Deprecated. Since 2.0. Will be removed in 3.0. Use
- addColumnFamily(TableName,
 HColumnDescriptor) instead.
+ addColumnFamily(TableName,
 ColumnFamilyDescriptor) instead.
 Add a column family to an existing table. Asynchronous 
operation.
 
 Specified by:
@@ -3347,20 +3347,20 @@ public void 
+
 
 
 
 
 addColumnFamily
 public void addColumnFamily(TableName tableName,
-HColumnDescriptor columnFamily)
+ColumnFamilyDescriptor columnFamily)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
-Description copied from 
interface: Admin
+Description copied from 
interface: Admin
 Add a column family to an existing table.
 
 Spec
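
The replacement overloads above pair with ColumnFamilyDescriptorBuilder; a hedged sketch, where
the Admin handle, table and family names are illustrative:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
    .newBuilder(Bytes.toBytes("cf1"))
    .setMaxVersions(3)
    .build();
// Replaces the deprecated HColumnDescriptor overloads shown above.
admin.addColumnFamily(TableName.valueOf("t1"), cf);
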

[09/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
index 4bd98f4..046cb95 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
@@ -212,7 +212,7 @@
 204import 
org.apache.hadoop.util.StringUtils;
 205import 
org.apache.zookeeper.KeeperException;
 206
-207import 
com.google.common.annotations.VisibleForTesting;
+207import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 208import com.google.protobuf.Descriptors;
 209import com.google.protobuf.Message;
 210import 
com.google.protobuf.RpcController;
@@ -1008,7 +1008,7 @@
 1000  /**
 1001   * {@inheritDoc}
 1002   * @deprecated Since 2.0. Will be 
removed in 3.0. Use
-1003   * {@link 
#addColumnFamily(TableName, HColumnDescriptor)} instead.
+1003   * {@link 
#addColumnFamily(TableName, ColumnFamilyDescriptor)} instead.
 1004   */
 1005  @Override
 1006  @Deprecated
@@ -1018,14 +1018,14 @@
 1010  }
 1011
 1012  @Override
-1013  public void addColumnFamily(final 
TableName tableName, final HColumnDescriptor columnFamily)
+1013  public void addColumnFamily(final 
TableName tableName, final ColumnFamilyDescriptor columnFamily)
 1014  throws IOException {
 1015get(addColumnFamilyAsync(tableName, 
columnFamily), syncWaitTimeout, TimeUnit.MILLISECONDS);
 1016  }
 1017
 1018  @Override
 1019  public Future 
addColumnFamilyAsync(final TableName tableName,
-1020  final HColumnDescriptor 
columnFamily) throws IOException {
+1020  final ColumnFamilyDescriptor 
columnFamily) throws IOException {
 1021AddColumnResponse response =
 1022executeCallable(new 
MasterCallable(getConnection(),
 1023getRpcControllerFactory()) 
{
@@ -1106,7 +1106,7 @@
 1098  /**
 1099   * {@inheritDoc}
 1100   * @deprecated As of 2.0. Will be 
removed in 3.0. Use
-1101   * {@link 
#modifyColumnFamily(TableName, HColumnDescriptor)} instead.
+1101   * {@link 
#modifyColumnFamily(TableName, ColumnFamilyDescriptor)} instead.
 1102   */
 1103  @Override
 1104  @Deprecated
@@ -1117,13 +1117,13 @@
 1109
 1110  @Override
   public void modifyColumnFamily(final 
TableName tableName,
-1112  final HColumnDescriptor 
columnFamily) throws IOException {
+1112  final ColumnFamilyDescriptor 
columnFamily) throws IOException {
 1113
get(modifyColumnFamilyAsync(tableName, columnFamily), syncWaitTimeout, 
TimeUnit.MILLISECONDS);
 1114  }
 1115
 1116  @Override
 1117  public Future 
modifyColumnFamilyAsync(final TableName tableName,
-1118  final HColumnDescriptor 
columnFamily) throws IOException {
+1118  final ColumnFamilyDescriptor 
columnFamily) throws IOException {
 1119ModifyColumnResponse response =
 1120executeCallable(new 
MasterCallable(getConnection(),
 1121getRpcControllerFactory()) 
{

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
index 4bd98f4..046cb95 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
@@ -212,7 +212,7 @@
 204import 
org.apache.hadoop.util.StringUtils;
 205import 
org.apache.zookeeper.KeeperException;
 206
-207import 
com.google.common.annotations.VisibleForTesting;
+207import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 208import com.google.protobuf.Descriptors;
 209import com.google.protobuf.Message;
 210import 
com.google.protobuf.RpcController;
@@ -1008,7 +1008,7 @@
 1000  /**
 1001   * {@inheritDoc}
 1002   * @deprecated Since 2.0. Will be 
removed in 3.0. Use
-1003   * {@link 
#addColumnFamily(TableName, HColumnDescriptor)} instead.
+1003   * {@link 
#addColumnFamily(TableName, ColumnFamilyDescriptor)} instead.
 1004   */
 1005  @Override
 1006  @Deprecated
@@ -1018,14 +1018,14 @@
 1010  }
 1011
 1012  @Override
-1013  public void addColumnFamily(final 
TableName tableName, final HColumnDescriptor columnFamily)
+1013  public void addColumnFamily(final 
TableName tableName, final ColumnFamilyDescriptor columnFamily)
 1014  throws IOException {
 1015get(ad

[33/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
index ad15c2c..8fffd0c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class LruBlockCache
+public class LruBlockCache
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements ResizableBlockCache, 
HeapSize
 A block cache implementation that is memory-aware using HeapSize,
@@ -758,7 +758,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -767,7 +767,7 @@ implements 
 
 LRU_MIN_FACTOR_CONFIG_NAME
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_MIN_FACTOR_CONFIG_NAME
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_MIN_FACTOR_CONFIG_NAME
 Percentage of total size that eviction will evict until; 
e.g. if set to .8, then we will keep
  evicting during an eviction run till the cache size is down to 80% of the 
total.
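
Read together with the acceptable factor described just below: an eviction run starts once the
cache grows past (acceptable factor x max size) and keeps evicting until usage is back under
(min factor x max size). A hedged numeric sketch, reusing the .8 from this javadoc and an
illustrative 0.95 that is not a recommended setting:

// Illustration only.
long  maxSize          = 1024L * 1024 * 1024;                 // 1 GiB cache
float minFactor        = 0.80f;                               // from the example above
float acceptableFactor = 0.95f;                               // assumed for illustration
long  evictionTrigger  = (long) (maxSize * acceptableFactor); // start evicting above this
long  evictionTarget   = (long) (maxSize * minFactor);        // stop once usage falls to this
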
 
@@ -782,7 +782,7 @@ implements 
 
 LRU_ACCEPTABLE_FACTOR_CONFIG_NAME
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME
 Acceptable size of cache (no evictions if size < 
acceptable)
 
 See Also:
@@ -796,7 +796,7 @@ implements 
 
 LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME
 Hard capacity limit of cache, will reject any put if size > 
this * acceptable
 
 See Also:
@@ -810,7 +810,7 @@ implements 
 
 LRU_SINGLE_PERCENTAGE_CONFIG_NAME
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_SINGLE_PERCENTAGE_CONFIG_NAME
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_SINGLE_PERCENTAGE_CONFIG_NAME
 
 See Also:
 Constant
 Field Values
@@ -823,7 +823,7 @@ implements 
 
 LRU_MULTI_PERCENTAGE_CONFIG_NAME
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_MULTI_PERCENTAGE_CONFIG_NAME
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_MULTI_PERCENTAGE_CONFIG_NAME
 
 See Also:
 Constant
 Field Values
@@ -836,7 +836,7 @@ implements 
 
 LRU_MEMORY_PERCENTAGE_CONFIG_NAME
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_MEMORY_PERCENTAGE_CONFIG_NAME
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_MEMORY_PERCENTAGE_CONFIG_NAME
 
 See Also:
 Constant
 Field Values
@@ -849,7 +849,7 @@ implements 
 
 LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME
 Configuration key to force data-block always (except 
in-memory are too much)
  cached in memory for in-memory hfile, unlike inMemory, which is a 
column-family
  configuration, inMemoryForceMode is a cluster-wide configuration
@@ -865,7 +865,7 @@ implements 
 
 DEFAULT_LOAD_FACTOR
-static final float DEFAULT_LOAD_FACTOR
+static final float DEFAULT_LOAD_FACTOR
 
 See Also:
 Constant
 Fie

[16/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferCell.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferCell.html
index 924a575..18d00df 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferCell.html
@@ -572,7 +572,7 @@
 564public long heapSize() {
 565  long sum = HEAP_SIZE_OVERHEAD + 
CellUtil.estimatedHeapSizeOf(cell);
 566  if (this.tags != null) {
-567sum += 
ClassSize.sizeOf(this.tags, this.tags.length);
+567sum += 
ClassSize.sizeOf(this.tags);
 568  }
 569  return sum;
 570}
@@ -771,7 +771,7 @@
 763  long sum = HEAP_SIZE_OVERHEAD + 
CellUtil.estimatedHeapSizeOf(cell);
 764  // this.tags is on heap byte[]
 765  if (this.tags != null) {
-766sum += 
ClassSize.sizeOf(this.tags, this.tags.length);
+766sum += 
ClassSize.sizeOf(this.tags);
 767  }
 768  return sum;
 769}
@@ -897,7 +897,7 @@
 889public long heapSize() {
 890  long sum = ClassSize.REFERENCE + 
super.heapSize();
 891  if (this.value != null) {
-892sum += 
ClassSize.sizeOf(this.value, this.value.length);
+892sum += 
ClassSize.sizeOf(this.value);
 893  }
 894  return sum;
 895}
@@ -997,7 +997,7 @@
 989public long heapSize() {
 990  long sum = ClassSize.REFERENCE + 
super.heapSize();
 991  if (this.value != null) {
-992sum += 
ClassSize.sizeOf(this.value, this.value.length);
+992sum += 
ClassSize.sizeOf(this.value);
 993  }
 994  return sum;
 995}

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyCell.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyCell.html
index 924a575..18d00df 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.EmptyCell.html
@@ -572,7 +572,7 @@
 564public long heapSize() {
 565  long sum = HEAP_SIZE_OVERHEAD + 
CellUtil.estimatedHeapSizeOf(cell);
 566  if (this.tags != null) {
-567sum += 
ClassSize.sizeOf(this.tags, this.tags.length);
+567sum += 
ClassSize.sizeOf(this.tags);
 568  }
 569  return sum;
 570}
@@ -771,7 +771,7 @@
 763  long sum = HEAP_SIZE_OVERHEAD + 
CellUtil.estimatedHeapSizeOf(cell);
 764  // this.tags is on heap byte[]
 765  if (this.tags != null) {
-766sum += 
ClassSize.sizeOf(this.tags, this.tags.length);
+766sum += 
ClassSize.sizeOf(this.tags);
 767  }
 768  return sum;
 769}
@@ -897,7 +897,7 @@
 889public long heapSize() {
 890  long sum = ClassSize.REFERENCE + 
super.heapSize();
 891  if (this.value != null) {
-892sum += 
ClassSize.sizeOf(this.value, this.value.length);
+892sum += 
ClassSize.sizeOf(this.value);
 893  }
 894  return sum;
 895}
@@ -997,7 +997,7 @@
 989public long heapSize() {
 990  long sum = ClassSize.REFERENCE + 
super.heapSize();
 991  if (this.value != null) {
-992sum += 
ClassSize.sizeOf(this.value, this.value.length);
+992sum += 
ClassSize.sizeOf(this.value);
 993  }
 994  return sum;
 995}

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowByteBufferCell.html
index 924a575..18d00df 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowByteBufferCell.html
@@ -572,7 +572,7 @@
 564public long heapSize() {
 565  long sum = HEAP_SIZE_OVERHEAD + 
CellUtil.estimatedHeapSizeOf(cell);
 566  if (this.tags != null) {
-567sum += 
ClassSize.sizeOf(this.tags, this.tags.length);
+567sum += 
ClassSize.sizeOf(this.tags);
 568  }
 569  return sum;
 570}
@@ -771,7 +771,7 @@
 763  long sum = HEAP_SIZE_OVERHEAD + 
CellUtil.estimatedHeapSizeOf(cell);
 764  // this.tags is on heap byte[]
 765  if (this.tags != null) {
-766sum += 
ClassSize.sizeOf(this.tags, this.tags.length);
+766sum += 
ClassSize.sizeOf(this.tags);
 767  }
 768  return sum;
 769}
@@ -897,7 +897,7 @@
 88

[40/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 6222d48..5933927 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2322,7 +2322,7 @@ service.
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
  This will be removed in HBase 3.0.0.
- Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
+ Use Admin.addColumnFamily(TableName,
 ColumnFamilyDescriptor).
 
 
 
@@ -2332,7 +2332,7 @@ service.
  HColumnDescriptor columnFamily)
 Deprecated. 
 Since 2.0. Will be removed 
in 3.0. Use
- HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
+ HBaseAdmin.addColumnFamily(TableName,
 ColumnFamilyDescriptor) instead.
 
 
 
@@ -2342,40 +2342,40 @@ service.
ColumnFamilyDescriptor columnFamily) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncAdmin.addColumnFamily(TableName tableName,
+void
+Admin.addColumnFamily(TableName tableName,
ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-RawAsyncHBaseAdmin.addColumnFamily(TableName tableName,
+void
+HBaseAdmin.addColumnFamily(TableName tableName,
ColumnFamilyDescriptor columnFamily) 
 
 
-void
-Admin.addColumnFamily(TableName tableName,
-   HColumnDescriptor columnFamily)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.addColumnFamily(TableName tableName,
+   ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
 
-void
-HBaseAdmin.addColumnFamily(TableName tableName,
-   HColumnDescriptor columnFamily) 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+RawAsyncHBaseAdmin.addColumnFamily(TableName tableName,
+   ColumnFamilyDescriptor columnFamily) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-Admin.addColumnFamilyAsync(TableName tableName,
-HColumnDescriptor columnFamily)
+Admin.addColumnFamilyAsync(TableName tableName,
+ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-HBaseAdmin.addColumnFamilyAsync(TableName tableName,
-HColumnDescriptor columnFamily) 
+HBaseAdmin.addColumnFamilyAsync(TableName tableName,
+ColumnFamilyDescriptor columnFamily) 
 
 
 void
@@ -3638,7 +3638,7 @@ service.
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
  This will be removed in HBase 3.0.0.
- Use Admin.modifyColumnFamily(TableName,
 HColumnDescriptor).
+ Use Admin.modifyColumnFamily(TableName,
 ColumnFamilyDescriptor).
 
 
 
@@ -3648,7 +3648,7 @@ service.
 HColumnDescriptor columnFamily)
 Deprecated. 
 As of 2.0. Will be removed 
in 3.0. Use
- HBaseAdmin.modifyColumnFamily(TableName,
 HColumnDescriptor) instead.
+ HBaseAdmin.modifyColumnFamily(TableName,
 ColumnFamilyDescriptor) instead.
 
 
 
@@ -3658,40 +3658,40 @@ service.
   Column

[03/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
index cf44d69..3fdfe16 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
@@ -36,8 +36,8 @@
 028import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
 029import static 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
 030
-031import 
com.google.common.base.Throwables;
-032import 
com.google.common.collect.ImmutableMap;
+031import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
+032import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
 033import 
com.google.protobuf.CodedOutputStream;
 034
 035import io.netty.bootstrap.Bootstrap;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
index cf44d69..3fdfe16 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
@@ -36,8 +36,8 @@
 028import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
 029import static 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
 030
-031import 
com.google.common.base.Throwables;
-032import 
com.google.common.collect.ImmutableMap;
+031import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
+032import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
 033import 
com.google.protobuf.CodedOutputStream;
 034
 035import io.netty.bootstrap.Bootstrap;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
index cf44d69..3fdfe16 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
@@ -36,8 +36,8 @@
 028import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
 029import static 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
 030
-031import 
com.google.common.base.Throwables;
-032import 
com.google.common.collect.ImmutableMap;
+031import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
+032import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
 033import 
com.google.protobuf.CodedOutputStream;
 034
 035import io.netty.bootstrap.Bootstrap;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
index cf44d69..3fdfe16 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
@@ -36,8 +36,8 @@
 028import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
 029import static 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
 030
-031import 
com.google.common.base.Throwables;

[39/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index faf7382..2eb8361 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class AsyncHBaseAdmin
+public class AsyncHBaseAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
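
A hedged sketch of how this wrapper is normally reached from client code; exception handling is
elided and the method names assume the 2.0 async client as it stood in this snapshot:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

try (AsyncConnection conn =
         ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
  AsyncAdmin admin = conn.getAdmin();          // backed by this class
  admin.tableExists(TableName.valueOf("t1"))
       .thenAccept(exists -> System.out.println("t1 exists: " + exists));
}
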
@@ -897,7 +897,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -906,7 +906,7 @@ implements 
 
 rawAdmin
-private final RawAsyncHBaseAdmin rawAdmin
+private final RawAsyncHBaseAdmin rawAdmin
 
 
 
@@ -915,7 +915,7 @@ implements 
 
 pool
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in java.util.concurrent">ExecutorService pool
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in java.util.concurrent">ExecutorService pool
 
 
 
@@ -932,7 +932,7 @@ implements 
 
 AsyncHBaseAdmin
-AsyncHBaseAdmin(RawAsyncHBaseAdmin rawAdmin,
+AsyncHBaseAdmin(RawAsyncHBaseAdmin rawAdmin,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
 
 
@@ -950,7 +950,7 @@ implements 
 
 wrap
-private  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture wrap(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture future)
+private  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture wrap(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture future)
 
 
 
@@ -959,7 +959,7 @@ implements 
 
 tableExists
-public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
+public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
 
 Specified by:
 tableExists in
 interface AsyncAdmin
@@ -977,7 +977,7 @@ implements 
 
 listTables
-public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">OptionalPattern> pattern,
+public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">OptionalPattern> pattern,

boolean includeSysTables)
 Description copied from 
interface: AsyncAdmin
 List all the tables matching the given pa

[08/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.html
index b60a13d..dc03a6e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.html
@@ -35,7 +35,7 @@
 027import 
org.apache.hadoop.hbase.util.Bytes;
 028import 
org.apache.hadoop.hbase.util.CollectionUtils;
 029
-030import com.google.common.collect.Lists;
+030import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 031
 032/**
 033 * Data structure used in the first stage 
of PrefixTree encoding:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.html
index c60167e..48bfd3e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.html
@@ -37,7 +37,7 @@
 029import 
org.apache.hadoop.hbase.util.SimpleMutableByteRange;
 030import 
org.apache.hadoop.hbase.util.Strings;
 031
-032import com.google.common.collect.Lists;
+032import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 033
 034/**
 035 * Individual node in a Trie structure.  
Each node is one of 3 types:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.SplitLogManagerDetails.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.SplitLogManagerDetails.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.SplitLogManagerDetails.html
index 4431781..7a4a5e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.SplitLogManagerDetails.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.SplitLogManagerDetails.html
@@ -40,7 +40,7 @@
 032import 
org.apache.hadoop.hbase.master.SplitLogManager.Task;
 033import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 034
-035import 
com.google.common.annotations.VisibleForTesting;
+035import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 036
 037/**
 038 * Coordination for SplitLogManager. It 
creates and works with tasks for split log operations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.html b/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.html
index 4431781..7a4a5e3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.html
@@ -40,7 +40,7 @@
 032import org.apache.hadoop.hbase.master.SplitLogManager.Task;
 033import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 034
-035import com.google.common.annotations.VisibleForTesting;
+035import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 036
 037/**
 038 * Coordination for SplitLogManager. It creates and works with tasks for split log operations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html b/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
index f122431..21bf820 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
+++ b/devapidocs/src-html/org/apac

[50/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index f68152e..0ec9aef 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Apache HBase (TM) ACID Properties
@@ -395,7 +395,7 @@ under the License. -->
 
 
   
-All mutations are atomic within a row. Any put will either wholely succeed 
or wholely fail.[3]
+All mutations are atomic within a row. Any put will either wholly succeed 
or wholly fail.[3]
   
 
 
@@ -586,7 +586,7 @@ under the License. -->
   written to the OS cache on all replicas of the log. In the case of a 
full datacenter power loss, it is
   possible that the edits are not truly durable.
   
-[3] Puts will either wholely succeed or wholely fail, provided that they 
are actually sent
+[3] Puts will either wholly succeed or wholly fail, provided that they are 
actually sent
   to the RegionServer.  If the writebuffer is used, Puts will not be sent 
until the writebuffer is filled
   or it is explicitly flushed.
   
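(Aside: a minimal client-side sketch of the write-buffer caveat in footnote [3] above. The table and column names are invented; the BufferedMutator is simply the buffered write path the footnote refers to.)

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public final class FlushExample {
  public static void writeAndFlush(Connection conn) throws Exception {
    try (BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("demo"))) {
      Put put = new Put(Bytes.toBytes("row1"));
      put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      mutator.mutate(put);   // buffered client-side; not yet sent to the RegionServer
      mutator.flush();       // forces the buffered mutations out; row-level atomicity still applies
    }
  }
}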
@@ -606,7 +606,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-20
+  Last Published: 
2017-07-21
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index ff5a5a8..75361ee 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20170720214839+00'00')
-/CreationDate (D:20170720214839+00'00')
+/ModDate (D:20170721214854+00'00')
+/CreationDate (D:20170721214854+00'00')
 >>
 endobj
 2 0 obj
@@ -27334,7 +27334,7 @@ endobj
 endobj
 136 0 obj
 << /Limits [(__anchor-top) (adding.new.node)]
-/Names [(__anchor-top) 25 0 R (__indexterm-6954594) 3262 0 R 
(__indexterm-6956844) 3264 0 R (__indexterm-6958906) 3265 0 R 
(__indexterm-6960780) 3266 0 R (acid) 891 0 R 
(add-metric-name-and-function-to-hadoop-compat-interface) 3361 0 R 
(add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3362 0 R 
(add.metrics) 3359 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3596 
0 R (adding.new.node) 2859 0 R]
+/Names [(__anchor-top) 25 0 R (__indexterm-6954592) 3262 0 R 
(__indexterm-6956842) 3264 0 R (__indexterm-6958904) 3265 0 R 
(__indexterm-6960778) 3266 0 R (acid) 891 0 R 
(add-metric-name-and-function-to-hadoop-compat-interface) 3361 0 R 
(add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3362 0 R 
(add.metrics) 3359 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3596 
0 R (adding.new.node) 2859 0 R]
 >>
 endobj
 137 0 obj
@@ -688294,7 +688294,7 @@ endobj
 >>
 endobj
 3546 0 obj
-<< /Length 11860
+<< /Length 11866
 >>
 stream
 q
@@ -688741,12 +688741,12 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-0.4465 Tw
+1.3697 Tw
 
 BT
 48.24 354.866 Td
 /F1.0 10.5 Tf
-[<6164646974696f6e20616e> 20.0195 
<79206f7468657220696e766f6c76656420636f6d706f6e656e74732e204d6f737420636f6d706f6e656e74732068617665206f6e65206f72206d6f72652064656661756c74206f776e6572732c2077686f>]
 TJ
+[<6164646974696f6e20746f20616e> 20.0195 
<79206f7468657220696e766f6c76656420636f6d706f6e656e74732e204d6f737420636f6d706f6e656e74732068617665206f6e65206f72206d6f72652064656661756c74206f776e6572732c>]
 TJ
 ET
 
 
@@ -688756,12 +688756,12 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-1.0912 Tw
+0.7296 Tw
 
 BT
 48.24 339.086 Td
 /F1.0 10.5 Tf
-<6d6f6e69746f72206e65772069737375657320776869636820636f6d6520696e746f2074686f7365207175657565732e205265676172646c657373206f66207768657468657220796f75206665656c2061626c6520746f2066697820746865>
 Tj
+<77686f206d6f6e69746f72206e65772069737375657320776869636820636f6d6520696e746f2074686f7365207175657565732e205265676172646c657373206f66207768657468657220796f75206665656c2061626c6520746f20666978>
 Tj
 ET
 
 
@@ -688774,7 +688774,7 @@ ET
 BT
 48.24 323.306 Td
 /F1.0 10.5 Tf
-<6275672c20796f752073686f756c64207374696c6c2066696c65206275677320776865726520796f7520736565207468656d2e>
 Tj
+<746865206275672c20796f752073686f756c64207374696c6c2066696c65206275677320776865726520796f7520736565207468656d2e>
 Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -758328,1034 +758328,1034 @@ xref
 0010299652 0 n 
 0010299700 0 n 
 0010300211 0 n 
-0010312127 0 n 
-0010312538 0 n 
-0010312586 0 n 
-0010312762 0 n 
-0010312928 0 n 
-0010313053 0 n 
-0010313101 0 n 
-0010313149 0 n 
-

[34/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
index 628e2aa..3ec28b0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler
+private static final class FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler
 extends 
io.netty.channel.SimpleChannelInboundHandler
 
 
@@ -268,7 +268,7 @@ extends 
io.netty.channel.SimpleChannelInboundHandlerSaslClient saslClient
+private final http://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true";
 title="class or interface in javax.security.sasl">SaslClient saslClient
 
 
 
@@ -285,7 +285,7 @@ extends 
io.netty.channel.SimpleChannelInboundHandlerSaslClient saslClient)
+public SaslUnwrapHandler(http://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true";
 title="class or interface in 
javax.security.sasl">SaslClient saslClient)
 
 
 
@@ -302,7 +302,7 @@ extends 
io.netty.channel.SimpleChannelInboundHandlerException
 
 Specified by:
@@ -320,7 +320,7 @@ extends 
io.netty.channel.SimpleChannelInboundHandlerException
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
index 9495ef6..0131257 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler
+private static final class FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler
 extends io.netty.channel.ChannelOutboundHandlerAdapter
 
 
@@ -270,7 +270,7 @@ extends io.netty.channel.ChannelOutboundHandlerAdapter
 
 
 saslClient
-private final http://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true";
 title="class or interface in javax.security.sasl">SaslClient saslClient
+private final http://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true";
 title="class or interface in javax.security.sasl">SaslClient saslClient
 
 
 
@@ -279,7 +279,7 @@ extends io.netty.channel.ChannelOutboundHandlerAdapter
 
 
 cBuf
-private io.netty.buffer.CompositeByteBuf cBuf
+private io.netty.buffer.CompositeByteBuf cBuf
 
 
 
@@ -296,7 +296,7 @@ extends io.netty.channel.ChannelOutboundHandlerAdapter
 
 
 SaslWrapHandler
-public SaslWrapHandler(http://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true";
 title="class or interface in 
javax.security.sasl">SaslClient saslClient)
+public SaslWrapHandler(http://docs.

[21/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.PermissionCache.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.PermissionCache.html
 
b/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.PermissionCache.html
index 8a9a80c..5880360 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.PermissionCache.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.PermissionCache.html
@@ -134,13 +134,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Field and Description
 
 
-private 
com.google.common.collect.ListMultimapString,T>
+private 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,T>
 groupCache
 Cache of group permissions
 
 
 
-private 
com.google.common.collect.ListMultimapString,T>
+private 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,T>
 userCache
 Cache of user permissions
 
@@ -180,7 +180,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Method and Description
 
 
-com.google.common.collect.ListMultimapString,T>
+org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,T>
 getAllPermissions()
 Returns a combined map of user and group permissions, with 
group names
  distinguished according to AuthUtil.isGroupPrincipal(String).
@@ -242,7 +242,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 userCache
-private com.google.common.collect.ListMultimapString,T extends Permission> userCache
+private org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,T extends Permission> userCache
 Cache of user permissions
 
 
@@ -252,7 +252,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 groupCache
-private com.google.common.collect.ListMultimapString,T extends Permission> groupCache
+private org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,T extends Permission> groupCache
 Cache of group permissions
 
 
@@ -349,7 +349,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getAllPermissions
-public com.google.common.collect.ListMultimapString,T> getAllPermissions()
+public org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimapString,T> getAllPermissions()
 Returns a combined map of user and group permissions, with 
group names
  distinguished according to AuthUtil.isGroupPrincipal(String).
 

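(Aside: a rough sketch of the user/group cache combination described above. This is an illustration of the idea, not the actual TableAuthManager code; the '@'-prefixed group keys mirror the convention that AuthUtil.isGroupPrincipal(String) checks for.)

import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;

final class PermissionCacheSketch<T> {
  private final ListMultimap<String, T> userCache = ArrayListMultimap.create();   // user -> permissions
  private final ListMultimap<String, T> groupCache = ArrayListMultimap.create();  // group -> permissions

  // Combined view: group entries are stored under an '@'-prefixed key so callers can
  // tell them apart from plain user names.
  ListMultimap<String, T> getAllPermissions() {
    ListMultimap<String, T> all = ArrayListMultimap.create();
    all.putAll(userCache);
    for (String group : groupCache.keySet()) {
      all.putAll("@" + group, groupCache.get(group));
    }
    return all;
  }
}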
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.html
index 54a9426..566da9c 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/TableAuthManager.html
@@ -463,14 +463,14 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 private void
-updateGlobalCache(com.google.common.collect.ListMultimap

[45/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 55440a4..c99029c 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 ©2007 - 2017 The Apache Software Foundation
 
   File: 2247,
- Errors: 14814,
+ Errors: 14906,
  Warnings: 0,
  Infos: 0
   
@@ -125,7 +125,7 @@ under the License.
   0
 
 
-  13
+  15
 
   
   
@@ -223,7 +223,7 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
@@ -279,7 +279,7 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
@@ -1049,7 +1049,7 @@ under the License.
   0
 
 
-  7
+  6
 
   
   
@@ -1385,7 +1385,7 @@ under the License.
   0
 
 
-  55
+  56
 
   
   
@@ -1553,7 +1553,7 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
@@ -2197,7 +2197,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -2239,7 +2239,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -2253,7 +2253,7 @@ under the License.
   0
 
 
-  14
+  13
 
   
   
@@ -2407,7 +2407,7 @@ under the License.
   0
 
 
-  6
+  7
 
   
   
@@ -2771,7 +2771,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -3415,7 +3415,7 @@ under the License.
   0
 
 
-  8
+  9
 
   
   
@@ -3555,7 +3555,7 @@ under the License.
   0
 
 
-  6
+  5
 
   
   
@@ -3695,7 +3695,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -3793,7 +3793,7 @@ under the License.
   0
 
 
-  5
+  4
 
   
   
@@ -3863,7 +3863,7 @@ under the License.
   0
 
 
-  0
+  2
 
   
   
@@ -4199,7 +4199,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -4311,7 +4311,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -4465,7 +4465,7 @@ under the License.
   0
 
 
-  47
+  48
 
   
   
@@ -4829,7 +4829,7 @@ under the License.
   0
 
 
-  6
+  5
 
   
   
@@ -4843,7 +4843,7 @@ under the License.
   0
 
 
-  11
+  12
 
   
   
@@ -4969,7 +4969,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -5417,7 +5417,7 @@ under the License.

[26/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.html
index 0962ac6..c870249 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var methods = {"i0":10,"i1":9};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class ScannerIdGenerator
+public class ScannerIdGenerator
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 Generate a new style scanner id to prevent collision with 
previous started server or other RSs.
  We have 64 bits to use.
@@ -170,7 +170,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Method Summary
 
-All Methods Instance Methods Concrete Methods 
+All Methods Static Methods Instance Methods Concrete Methods 
 
 Modifier and Type
 Method and Description
@@ -179,6 +179,10 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 long
 generateNewScannerId() 
 
+
+static void
+main(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String[] args) 
+
 
 
 
@@ -207,7 +211,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 serverNameHash
-private final long serverNameHash
+private final long serverNameHash
 
 
 
@@ -216,7 +220,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 scannerIdGen
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger scannerIdGen
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger scannerIdGen
 
 
 
@@ -233,7 +237,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 ScannerIdGenerator
-public ScannerIdGenerator(ServerName serverName)
+public ScannerIdGenerator(ServerName serverName)
 
 
 
@@ -247,10 +251,19 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-
+
 
 generateNewScannerId
-public long generateNewScannerId()
+public long generateNewScannerId()
+
+
+
+
+
+
+
+main
+public static void main(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String[] args)
 
 
 

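(Aside: a hedged sketch of the id scheme described above: a per-server component plus a local counter packed into the 64 available bits. The exact hash and bit layout HBase uses may differ; this only illustrates the idea.)

import java.util.concurrent.atomic.AtomicInteger;

final class ScannerIdSketch {
  private final long serverNameHash;                     // derived from the server's identity
  private final AtomicInteger scannerIdGen = new AtomicInteger(0);

  ScannerIdSketch(String serverName) {
    this.serverNameHash = serverName.hashCode();
  }

  long generateNewScannerId() {
    // high 32 bits identify the server incarnation, low 32 bits are a local sequence
    return (serverNameHash << 32) | (scannerIdGen.incrementAndGet() & 0xFFFFFFFFL);
  }
}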
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/regionserver/SegmentFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/SegmentFactory.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/SegmentFactory.html
index 916e889..a50fb34 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/SegmentFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/SegmentFactory.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public final class SegmentFactory
+public final class SegmentFactory
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 A singleton store segment factory.
  Generate concrete store segments.
@@ -264,7 +264,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 instance
-private static SegmentFactory instance
+private static SegmentFactory instance
 
 
 
@@ -281,7 +281,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 SegmentFactory
-private SegmentFactory()
+private SegmentFactory()
 
 
 
@@ -298,7 +298,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 instance
-public static SegmentFactory instance()
+public static SegmentFactory instance()
 
 
 
@@ -307,7 +307,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 createCompositeImmutableSegment
-public CompositeImmutableSegment createCompositeImmutableSegment(CellComparator comparator,
+public CompositeImmutableSeg

[28/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 2ca99ba..2227828 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HRegionServer
+public class HRegionServer
 extends HasThread
 implements RegionServerServices, LastSequenceId, 
ConfigurationObserver
 HRegionServer makes a set of HRegions available to clients. 
It checks in with
@@ -1534,7 +1534,7 @@ implements 
 
 INIT_PAUSE_TIME_MS
-private static final int INIT_PAUSE_TIME_MS
+private static final int INIT_PAUSE_TIME_MS
 
 See Also:
 Constant
 Field Values
@@ -1547,7 +1547,7 @@ implements 
 
 REGION_LOCK_AWAIT_TIME_SEC
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String REGION_LOCK_AWAIT_TIME_SEC
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String REGION_LOCK_AWAIT_TIME_SEC
 
 See Also:
 Constant
 Field Values
@@ -1560,7 +1560,7 @@ implements 
 
 DEFAULT_REGION_LOCK_AWAIT_TIME_SEC
-public static final int DEFAULT_REGION_LOCK_AWAIT_TIME_SEC
+public static final int DEFAULT_REGION_LOCK_AWAIT_TIME_SEC
 
 See Also:
 Constant
 Field Values
@@ -1573,7 +1573,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -1582,7 +1582,7 @@ implements 
 
 TEST_SKIP_REPORTING_TRANSITION
-public static boolean TEST_SKIP_REPORTING_TRANSITION
+public static boolean TEST_SKIP_REPORTING_TRANSITION
 For testing only!  Set to true to skip notifying region 
assignment to master .
 
 
@@ -1592,7 +1592,7 @@ implements 
 
 OPEN
-protected static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String OPEN
+protected static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String OPEN
 
 See Also:
 Constant
 Field Values
@@ -1605,7 +1605,7 @@ implements 
 
 CLOSE
-protected static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CLOSE
+protected static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CLOSE
 
 See Also:
 Constant
 Field Values
@@ -1618,7 +1618,7 @@ implements 
 
 regionsInTransitionInRS
-protected final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMapBoolean> regionsInTransitionInRS
+protected final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMapBoolean> regionsInTransitionInRS
 
 
 
@@ -1627,7 +1627,7 @@ implements 
 
 cacheFlusher
-protected MemStoreFlusher cacheFlusher
+protected MemStoreFlusher cacheFlusher
 
 
 
@@ -1636,7 +1636,7 @@ implements 
 
 hMemManager
-protected HeapMemoryManager hMemManager
+protected HeapMemoryManager hMemManager
 
 
 
@@ -1645,7 +1645,7 @@ implements 
 
 initLatch
-protected http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CountDownLatch.html?is-external=true";
 title="class or interface in java.util.concurrent">CountDownLatch initLatch
+protected http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CountDownLatch.html?is-external=true";
 title="class or interface in java.util.concurrent">CountDownLatch initLatch
 
 
 
@@ -1654,7 +1654,7 @@ implements 
 
 clusterConnection
-protected ClusterConnection clusterConnection
+protected ClusterConnection clusterConnection
 Cluster connection to be shared by services.
  Initialized at server startup and closed when server shuts down.
  Clients must never close it explicitly.
@@ -1666,7 +1666,7 @@ implements 
 
 metaTableLocator
-protected MetaTableLocator metaTableLocator
+protected MetaTableLocator metaTableLocator
 
 
 
@@ -1675,7 +1675,7 @@ implements 
 
 recoveringRegionWatcher
-private RecoveringRegionWatcher 
recoveringRegionWatcher
+p

[29/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html
index 3e745ba..d6bbe5b 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html
@@ -263,8 +263,8 @@ extends 
 
 
-(package private) 
com.google.common.collect.Multimap
-computeSnapshotSizes(com.google.common.collect.MultimapString> snapshotsToComputeSize)
+(package private) 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap
+computeSnapshotSizes(org.apache.hadoop.hbase.shaded.com.google.common.collect.MultimapString> snapshotsToComputeSize)
 Computes the size of each snapshot provided given the 
current files referenced by the table.
 
 
@@ -305,7 +305,7 @@ extends 
 
 
-(package private) 
com.google.common.collect.MultimapString>
+(package private) 
org.apache.hadoop.hbase.shaded.com.google.common.collect.MultimapString>
 getSnapshotsFromTables(Admin admin,
   http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">Set tablesToFetchSnapshotsFrom)
 Computes a mapping of originating TableName to 
snapshots, when the TableName
@@ -313,7 +313,7 @@ extends 
 
 
-(package private) 
com.google.common.collect.MultimapString>
+(package private) 
org.apache.hadoop.hbase.shaded.com.google.common.collect.MultimapString>
 getSnapshotsToComputeSize()
 Fetches each table with a quota (table or namespace quota), 
and then fetch the name of each
  snapshot which was created from that table.
@@ -341,27 +341,27 @@ extends 
 
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long>
-groupSnapshotSizesByNamespace(com.google.common.collect.Multimap snapshotsWithSize)
+groupSnapshotSizesByNamespace(org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap snapshotsWithSize)
 Sums the snapshot sizes for each namespace.
 
 
 
 (package private) void
-persistSnapshotSizes(com.google.common.collect.Multimap snapshotsWithSize)
+persistSnapshotSizes(org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap snapshotsWithSize)
 Writes the snapshot sizes to the hbase:quota 
table.
 
 
 
 (package private) void
-persistSnapshotSizes(Table table,
-com.google.common.collect.Multimap snapshotsWithSize)
+persistSnapshotSizes(Table table,
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap snapshotsWithSize)
 Writes the snapshot sizes to the provided 
table.
 
 
 
 (package private) void
-persistSnapshotSizesByNS(Table quotaTable,
-com.google.common.collect.Multimap snapshotsWithSize)
+persistSnapshotSizesByNS(Table quotaTable,
+
org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap snapshotsWithSize)
 Rolls up the snapshot sizes by namespace and writes a 
single record for each namespace
  which is the size of all snapshots in that namespace.
 
@@ -588,8 +588,8 @
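(Aside: an illustrative sketch of the namespace roll-up performed by groupSnapshotSizesByNamespace above. The value type is simplified to a plain Long; the real chore carries a richer snapshot-with-size value.)

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;

final class SnapshotSizeRollupSketch {
  Map<String, Long> groupSizesByNamespace(Multimap<TableName, Long> snapshotSizesByTable) {
    Map<String, Long> sizesByNamespace = new HashMap<>();
    for (Map.Entry<TableName, Long> entry : snapshotSizesByTable.entries()) {
      String ns = entry.getKey().getNamespaceAsString();
      sizesByNamespace.merge(ns, entry.getValue(), Long::sum); // sum all snapshots in the namespace
    }
    return sizesByNamespace;
  }
}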

[42/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/TableName.html
index a3b30fa..0552f44 100644
--- a/devapidocs/org/apache/hadoop/hbase/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/TableName.html
@@ -693,7 +693,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 TableName
-private TableName(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer namespace,
+private TableName(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer namespace,
   http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer qualifier)
throws http://docs.oracle.com/javase/8/docs/api/java/lang/IllegalArgumentException.html?is-external=true";
 title="class or interface in java.lang">IllegalArgumentException
 
@@ -708,7 +708,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 TableName
-private TableName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String qualifier)
+private TableName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String qualifier)
 This is only for the old and meta tables.
 
 
@@ -769,7 +769,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 isLegalTableQualifierName
-public static byte[] isLegalTableQualifierName(byte[] qualifierName)
+public static byte[] isLegalTableQualifierName(byte[] qualifierName)
 
 
 
@@ -778,7 +778,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 isLegalTableQualifierName
-public static byte[] isLegalTableQualifierName(byte[] qualifierName,
+public static byte[] isLegalTableQualifierName(byte[] qualifierName,
boolean isSnapshot)
 
 
@@ -788,7 +788,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 isLegalTableQualifierName
-public static void isLegalTableQualifierName(byte[] qualifierName,
+public static void isLegalTableQualifierName(byte[] qualifierName,
  int start,
  int end)
 Qualifier names can only contain 'word' characters
@@ -808,7 +808,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 isLegalTableQualifierName
-public static void isLegalTableQualifierName(byte[] qualifierName,
+public static void isLegalTableQualifierName(byte[] qualifierName,
  int start,
  int end,
  boolean isSnapshot)
@@ -820,7 +820,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 isLegalNamespaceName
-public static void isLegalNamespaceName(byte[] namespaceName)
+public static void isLegalNamespaceName(byte[] namespaceName)
 
 
 
@@ -829,7 +829,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 isLegalNamespaceName
-public static void isLegalNamespaceName(byte[] namespaceName,
+public static void isLegalNamespaceName(byte[] namespaceName,
 int start,
 int end)
 Valid namespace characters are alphabetic characters, 
numbers, and underscores.
@@ -841,7 +841,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 getName
-public byte[] getName()
+public byte[] getName()
 
 
 
@@ -850,7 +850,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 getNameAsString
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getNameAsString()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getNameAsString()
 
 
 
@@ -859,7 +859,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 getNamespace
-public byte[] getNamespace()
+public byte[] getNamespace()
 
 
 
@@ -868,7 +868,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 getNamespaceAsString
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getNamespaceAsString()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="cla

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
index 4bd895c..e9cc458 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
@@ -45,8 +45,8 @@
 037import org.apache.hadoop.io.IOUtils;
 038import 
org.apache.hadoop.io.WritableUtils;
 039
-040import com.google.common.base.Function;
-041import com.google.common.collect.Lists;
+040import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
+041import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 042
 043/**
 044 * static convenience methods for dealing 
with KeyValues and collections of KeyValues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
@@ -75,7 +75,7 @@
 067import 
org.apache.hadoop.hbase.util.Pair;
 068import 
org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import 
com.google.common.annotations.VisibleForTesting;
+070import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import 
edu.umd.cs.findbugs.annotations.NonNull;
 073import 
edu.umd.cs.findbugs.annotations.Nullable;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
@@ -75,7 +75,7 @@
 067import 
org.apache.hadoop.hbase.util.Pair;
 068import 
org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import 
com.google.common.annotations.VisibleForTesting;
+070import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import 
edu.umd.cs.findbugs.annotations.NonNull;
 073import 
edu.umd.cs.findbugs.annotations.Nullable;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
@@ -75,7 +75,7 @@
 067import 
org.apache.hadoop.hbase.util.Pair;
 068import 
org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import 
com.google.common.annotations.VisibleForTesting;
+070import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import 
edu.umd.cs.findbugs.annotations.NonNull;
 073import 
edu.umd.cs.findbugs.annotations.Nullable;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
@@ -75,7 +75,7 @@
 067import 
org.apache.hadoop.hbase.util.Pair;
 068import 
org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import 
com.google.common.annotations.VisibleForTesting;
+070import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import 
edu.umd.cs.findbugs.annotations.NonNull;
 073import 
edu.umd.cs.findbugs.an

[24/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html 
b/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
index 4e9bdd4..de52857 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
@@ -101,7 +101,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Superinterfaces:
-ReplicationPeerConfigListener, 
com.google.common.util.concurrent.Service
+ReplicationPeerConfigListener, 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
 
 
 All Known Implementing Classes:
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value="Replication")
 public interface ReplicationEndpoint
-extends com.google.common.util.concurrent.Service, ReplicationPeerConfigListener
+extends 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service, ReplicationPeerConfigListener
 ReplicationEndpoint is a plugin which implements replication
  to other HBase clusters, or other systems. ReplicationEndpoint implementation
  can be specified at the peer creation time by specifying it
@@ -153,11 +153,11 @@ extends com.google.common.util.concurrent.Service, 
-
+
 
 
-Nested classes/interfaces inherited from 
interface com.google.common.util.concurrent.Service
-com.google.common.util.concurrent.Service.State
+Nested classes/interfaces inherited from 
interface org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
+org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service.Listener,
 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service.State
 
 
 
@@ -206,11 +206,11 @@ extends com.google.common.util.concurrent.Service, 
-
+
 
 
-Methods inherited from 
interface com.google.common.util.concurrent.Service
-isRunning, start, startAndWait, state, stop, stopAndWait
+Methods inherited from 
interface org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
+addListener, awaitRunning, awaitRunning, awaitTerminated, 
awaitTerminated, failureCause, isRunning, startAsync, state, 
stopAsync
 
 
 

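(Aside: a hedged sketch of what the Service API change above means for callers. With the shaded Guava Service, an endpoint is started and stopped asynchronously and then awaited, instead of the older blocking start()/stop() calls. The endpoint instance is assumed to exist.)

import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hbase.replication.ReplicationEndpoint;

public final class EndpointLifecycleSketch {
  public static void startAndStop(ReplicationEndpoint endpoint) throws Exception {
    endpoint.startAsync();                          // was: start() / startAndWait()
    endpoint.awaitRunning(30, TimeUnit.SECONDS);    // wait until RUNNING (or time out)

    // ... replicate ...

    endpoint.stopAsync();                           // was: stop() / stopAndWait()
    endpoint.awaitTerminated(30, TimeUnit.SECONDS); // wait until TERMINATED
  }
}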
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
index eac14f3..6d5eda0 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
@@ -81,7 +81,7 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 
-com.google.common.util.concurrent.AbstractService 
(implements com.google.common.util.concurrent.Service)
+org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AbstractService
 (implements 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service)
 
 org.apache.hadoop.hbase.replication.BaseReplicationEndpoint (implements 
org.apache.hadoop.hbase.replication.ReplicationEndpoint)
 
@@ -161,14 +161,14 @@
 org.apache.hadoop.hbase.replication.ReplicationPeer
 org.apache.hadoop.hbase.replication.ReplicationPeerConfigListener
 
-org.apache.hadoop.hbase.replication.ReplicationEndpoint (also extends 
com.google.common.util.concurrent.Service)
+org.apache.hadoop.hbase.replication.ReplicationEndpoint (also extends 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service)
 
 
 org.apache.hadoop.hbase.replication.ReplicationPeers
 org.apache.hadoop.hbase.replication.ReplicationQueues
 org.apache.hadoop.hbase.replication.ReplicationQueuesClient
 org.apache.hadoop.hbase.replication.ReplicationTracker
-com.google.common.util.concurrent.Service
+org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service
 
 org.apache.hadoop.hbase.replication.ReplicationEndpoint (also extends 
org.apache.hadoop.hbase.replication.ReplicationPeerConfigListener)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
index cb1c51f..ebadb27 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpRe

[05/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
index 0c07a2f..c90d203 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html
@@ -34,553 +34,554 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import 
com.google.common.annotations.VisibleForTesting;
-030
-031import io.netty.buffer.ByteBuf;
-032import 
io.netty.buffer.ByteBufAllocator;
-033import io.netty.channel.Channel;
-034import 
io.netty.channel.ChannelHandler.Sharable;
-035import 
io.netty.channel.ChannelHandlerContext;
-036import io.netty.channel.EventLoop;
-037import 
io.netty.channel.SimpleChannelInboundHandler;
-038import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-039import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-040import 
io.netty.handler.timeout.IdleStateEvent;
-041import 
io.netty.handler.timeout.IdleStateHandler;
-042import io.netty.util.concurrent.Future;
-043import 
io.netty.util.concurrent.Promise;
-044import 
io.netty.util.concurrent.PromiseCombiner;
-045
-046import java.io.IOException;
-047import java.nio.ByteBuffer;
-048import java.util.ArrayDeque;
-049import java.util.Collection;
-050import java.util.Collections;
-051import java.util.Deque;
-052import java.util.IdentityHashMap;
-053import java.util.List;
-054import java.util.Set;
-055import 
java.util.concurrent.CompletableFuture;
-056import java.util.concurrent.TimeUnit;
-057import java.util.function.Supplier;
-058
-059import 
org.apache.hadoop.conf.Configuration;
-060import 
org.apache.hadoop.crypto.Encryptor;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-063import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-064import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-065import 
org.apache.hadoop.hbase.util.FSUtils;
-066import 
org.apache.hadoop.hdfs.DFSClient;
-067import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-068import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-069import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-070import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-071import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-072import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-073import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-074import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-075import 
org.apache.hadoop.util.DataChecksum;
-076
-077/**
-078 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-079 * supports writing file with only one 
block.
-080 * 

-081 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly
-082 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the
-083 * method. And we place it here under util package because we want to make it independent of WAL
-084 * implementation thus easier to move it to HDFS project finally.
-085 *
-086 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only
-087 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and
-088 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside
-089 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)},
-090 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop},
-091 * there will be an extra context-switch.
-092 *
-093 * Advantages compare to DFSOutputStream:
-094 *
-095 * 1. The fan out mechanism. This will reduce the latency.
-096 * 2. The asynchronous WAL could also run in the same EventLoop, we could just call write and flush
-097 *    inside the EventLoop thread, so generally we only have one thread to do all the things.
-098 * 3. Fail-fast when connection to datanode error. The WAL implementation could open new writer
-099 *    ASAP.
-100 * 4. We could benefit from netty's ByteBuf management mechanism.
-101 *
-102 */
-103@InterfaceAudience.Private
-104public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput {
-105
-106  // The MAX_PACKET_SIZE is 16MB but it include the header size and checks
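(Aside: a sketch of the threading note in the class comment above. Calls issued from the EventLoop thread avoid the extra context switch, while the blocking close()/recoverAndClose() must stay off that thread. WalOutput below is a minimal stand-in for the output abstraction, for illustration only.)

import io.netty.channel.EventLoop;

final class AsyncWalWriteSketch {
  interface WalOutput {            // stand-in for the async output; not the real HBase type
    void write(byte[] b);
    void flush(boolean sync);
  }

  static void writeFromEventLoop(EventLoop eventLoop, WalOutput output, byte[] edit) {
    if (eventLoop.inEventLoop()) {
      output.write(edit);
      output.flush(false);         // already on the EventLoop thread: no context switch
    } else {
      eventLoop.execute(() -> {    // hop onto the EventLoop thread once, then write and flush
        output.write(edit);
        output.flush(false);
      });
    }
  }
}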

[02/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
index 42b6f60..37a733a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
@@ -28,10 +28,10 @@
 020import static 
io.netty.handler.timeout.IdleState.READER_IDLE;
 021import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
 022
-023import com.google.common.base.Charsets;
-024import 
com.google.common.base.Throwables;
-025import 
com.google.common.collect.ImmutableSet;
-026import com.google.common.collect.Maps;
+023import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets;
+024import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
+025import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
+026import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 027import 
com.google.protobuf.CodedOutputStream;
 028
 029import io.netty.buffer.ByteBuf;
@@ -93,7 +93,7 @@
 085import 
org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
 086import 
org.apache.hadoop.fs.FileEncryptionInfo;
 087import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-088import 
org.apache.hadoop.hbase.util.ByteStringer;
+088import com.google.protobuf.ByteString;
 089import 
org.apache.hadoop.hdfs.DFSClient;
 090import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 091import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -373,416 +373,418 @@
 365  
DataTransferEncryptorMessageProto.newBuilder();
 366  
builder.setStatus(DataTransferEncryptorStatus.SUCCESS);
 367  if (payload != null) {
-368
builder.setPayload(ByteStringer.wrap(payload));
-369  }
-370  if (options != null) {
-371
builder.addAllCipherOption(PB_HELPER.convertCipherOptions(options));
-372  }
-373  DataTransferEncryptorMessageProto 
proto = builder.build();
-374  int size = 
proto.getSerializedSize();
-375  size += 
CodedOutputStream.computeRawVarint32Size(size);
-376  ByteBuf buf = 
ctx.alloc().buffer(size);
-377  proto.writeDelimitedTo(new 
ByteBufOutputStream(buf));
-378  ctx.write(buf);
-379}
-380
-381@Override
-382public void 
handlerAdded(ChannelHandlerContext ctx) throws Exception {
-383  
ctx.write(ctx.alloc().buffer(4).writeInt(SASL_TRANSFER_MAGIC_NUMBER));
-384  sendSaslMessage(ctx, new 
byte[0]);
-385  ctx.flush();
-386  step++;
-387}
-388
-389@Override
-390public void 
channelInactive(ChannelHandlerContext ctx) throws Exception {
-391  saslClient.dispose();
-392}
-393
-394private void 
check(DataTransferEncryptorMessageProto proto) throws IOException {
-395  if (proto.getStatus() == 
DataTransferEncryptorStatus.ERROR_UNKNOWN_KEY) {
-396throw new 
InvalidEncryptionKeyException(proto.getMessage());
-397  } else if (proto.getStatus() == 
DataTransferEncryptorStatus.ERROR) {
-398throw new 
IOException(proto.getMessage());
-399  }
-400}
-401
-402private String getNegotiatedQop() {
-403  return (String) 
saslClient.getNegotiatedProperty(Sasl.QOP);
-404}
-405
-406private boolean 
isNegotiatedQopPrivacy() {
-407  String qop = getNegotiatedQop();
-408  return qop != null && 
"auth-conf".equalsIgnoreCase(qop);
-409}
-410
-411private boolean 
requestedQopContainsPrivacy() {
-412  Set requestedQop =
-413  
ImmutableSet.copyOf(Arrays.asList(saslProps.get(Sasl.QOP).split(",")));
-414  return 
requestedQop.contains("auth-conf");
-415}
-416
-417private void checkSaslComplete() 
throws IOException {
-418  if (!saslClient.isComplete()) {
-419throw new IOException("Failed to 
complete SASL handshake");
-420  }
-421  Set requestedQop =
-422  
ImmutableSet.copyOf(Arrays.asList(saslProps.get(Sasl.QOP).split(",")));
-423  String negotiatedQop = 
getNegotiatedQop();
-424  LOG.debug(
-425"Verifying QOP, requested QOP = " 
+ requestedQop + ", negotiated QOP = " + negotiatedQop);
-426  if 
(!requestedQop.contains(negotiatedQop)) {
-427throw new 
IOException(String.format("SASL handshake completed, but "
-428+ "channel does not have 
acceptable quality of protection, "
-429+ "requested = %s, negotiated 
= %s",
-430  requestedQop, 
negotiatedQop));
-431  }
-432}
-433
-434private

[43/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 412070e..8b8f33b 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1351,7 +1351,7 @@
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
  This will be removed in HBase 3.0.0.
- Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
+ Use Admin.addColumnFamily(TableName,
 ColumnFamilyDescriptor).
 
 
 addColumn(byte[],
 byte[], byte[]) - Method in class org.apache.hadoop.hbase.client.Append
@@ -1374,7 +1374,7 @@
 
 Deprecated.
 Since 2.0. Will be removed 
in 3.0. Use
- HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
+ HBaseAdmin.addColumnFamily(TableName,
 ColumnFamilyDescriptor) instead.
 
 
 addColumn(byte[],
 byte[], long) - Method in class org.apache.hadoop.hbase.client.Increment
@@ -1421,7 +1421,7 @@
 
 addColumn(byte[])
 - Method in class org.apache.hadoop.hbase.rest.RowSpec
  
-addColumnFamily(TableName,
 HColumnDescriptor) - Method in interface 
org.apache.hadoop.hbase.client.Admin
+addColumnFamily(TableName,
 ColumnFamilyDescriptor) - Method in interface 
org.apache.hadoop.hbase.client.Admin
 
 Add a column family to an existing table.
 
@@ -1431,7 +1431,7 @@
 
 addColumnFamily(TableName,
 ColumnFamilyDescriptor) - Method in class 
org.apache.hadoop.hbase.client.AsyncHBaseAdmin
  
-addColumnFamily(TableName,
 HColumnDescriptor) - Method in class 
org.apache.hadoop.hbase.client.HBaseAdmin
+addColumnFamily(TableName,
 ColumnFamilyDescriptor) - Method in class 
org.apache.hadoop.hbase.client.HBaseAdmin
  
 addColumnFamily(TableName,
 ColumnFamilyDescriptor) - Method in class 
org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin
  
@@ -1445,11 +1445,11 @@
 
 Add a column family to the table descriptor
 
-addColumnFamilyAsync(TableName,
 HColumnDescriptor) - Method in interface 
org.apache.hadoop.hbase.client.Admin
+addColumnFamilyAsync(TableName,
 ColumnFamilyDescriptor) - Method in interface 
org.apache.hadoop.hbase.client.Admin
 
 Add a column family to an existing table.
 
-addColumnFamilyAsync(TableName,
 HColumnDescriptor) - Method in class 
org.apache.hadoop.hbase.client.HBaseAdmin
+addColumnFamilyAsync(TableName,
 ColumnFamilyDescriptor) - Method in class 
org.apache.hadoop.hbase.client.HBaseAdmin
  
 AddColumnFamilyFuture(HBaseAdmin,
 TableName, MasterProtos.AddColumnResponse) - Constructor for class 
org.apache.hadoop.hbase.client.HBaseAdmin.AddColumnFamilyFuture
  
@@ -1923,6 +1923,8 @@
 
 Listen for failures to a given process.
 
+addListener(Service.Listener,
 Executor) - Method in class 
org.apache.hadoop.hbase.security.visibility.VisibilityReplicationEndpoint
+ 
 addLiveNode(String,
 long, int, int) - Method in class 
org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel
 
 Add a live node to the cluster representation.
@@ -2162,7 +2164,7 @@
  
 addRegionInTransition(RegionStates.RegionStateNode,
 RegionTransitionProcedure) - Method in class 
org.apache.hadoop.hbase.master.assignment.RegionStates
  
-addRegionPlan(MinMaxPriorityQueue,
 boolean, ServerName, List) - Method in class 
org.apache.hadoop.hbase.master.balancer.SimpleLoadBalancer
+addRegionPlan(MinMaxPriorityQueue,
 boolean, ServerName, List) - Method in class 
org.apache.hadoop.hbase.master.balancer.SimpleLoadBalancer
 
 Add a region from the head or tail to the List of regions 
to return.
 
@@ -2293,7 +2295,7 @@
  resolve -- we do NOT want a resolve happening every time we want
  to hold a hostname and port combo).
 
-Address(HostAndPort)
 - Constructor for class org.apache.hadoop.hbase.net.Address
+Address(HostAndPort)
 - Constructor for class org.apache.hadoop.hbase.net.Address
  
 address - 
Variable in class org.apache.hadoop.hbase.ServerName
  
@@ -4654,6 +4656,14 @@
 
 Wait until the read point catches up to the write point; 
i.e.
 
+awaitRunning()
 - Method in class org.apache.hadoop.hbase.security.visibility.VisibilityReplicationEndpoint
+ 
+awaitRunning(long,
 TimeUnit) - Method in class 
org.apache.hadoop.hbase.security.visibility.VisibilityReplicationEndpoint
+ 
+awaitTerminated()
 - Method in class org.apache.hadoop.hbase.security.visibility.VisibilityReplicationEndpoint
+ 
+awaitTerminated(long,
 TimeUnit) - Method in class 
org.apache.hadoop.hbase.security.visibility.VisibilityReplicationEndpoint
+ 
 awaitTermination()
 - Method in class org.apache.hadoop.hbase.procedure2.ProcedureExecutor.StoppableThread
  
 awaitTermination()
 - Method in class org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.TimeoutExecutorThread
@@ -5172,7 +5182,7 @@
 
 Balance the regions that should be on master 
regionserver.
 
-balanceOverall(List,
 Map,

[46/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 163159c..5b6f058 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -289,7 +289,7 @@
 2247
 0
 0
-14814
+14906
 
 Files
 
@@ -482,7 +482,7 @@
 org/apache/hadoop/hbase/MetaTableAccessor.java
 0
 0
-119
+120
 
 org/apache/hadoop/hbase/NamespaceDescriptor.java
 0
@@ -512,7 +512,7 @@
 org/apache/hadoop/hbase/ScheduledChore.java
 0
 0
-6
+5
 
 org/apache/hadoop/hbase/ServerLoad.java
 0
@@ -572,7 +572,7 @@
 org/apache/hadoop/hbase/ZKNamespaceManager.java
 0
 0
-2
+3
 
 org/apache/hadoop/hbase/ZNodeClearer.java
 0
@@ -752,7 +752,7 @@
 org/apache/hadoop/hbase/client/Admin.java
 0
 0
-96
+97
 
 org/apache/hadoop/hbase/client/Append.java
 0
@@ -794,49 +794,59 @@
 0
 1
 
-org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
+org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
 0
 0
-4
+1
 
+org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
+0
+0
+9
+
 org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
-11
-
+12
+
 org/apache/hadoop/hbase/client/AsyncProcessTask.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRequestFuture.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 0
 0
-24
-
+25
+
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java
 0
 0
+2
+
+org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
+0
+0
 1
 
 org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.java
@@ -844,10 +854,20 @@
 0
 1
 
+org/apache/hadoop/hbase/client/AsyncTableBase.java
+0
+0
+1
+
 org/apache/hadoop/hbase/client/AsyncTableImpl.java
 0
 0
 3
+
+org/apache/hadoop/hbase/client/AsyncTableResultScanner.java
+0
+0
+1
 
 org/apache/hadoop/hbase/client/BatchErrors.java
 0
@@ -862,7 +882,7 @@
 org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 0
 0
-4
+5
 
 org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java
 0
@@ -882,7 +902,7 @@
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
-2
+3
 
 org/apache/hadoop/hbase/client/ClientServiceCallable.java
 0
@@ -907,27 +927,27 @@
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
 0
 0
-52
+53
 
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
 
-org/apache/hadoop/hbase/client/ConnectionConfiguration.java
+org/apache/hadoop/hbase/client/ConnectionFactory.java
 0
 0
-1
+2
 
-org/apache/hadoop/hbase/client/ConnectionFactory.java
+org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
-2
+8
 
-org/apache/hadoop/hbase/client/ConnectionImplementation.java
+org/apache/hadoop/hbase/client/ConnectionUtils.java
 0
 0
-7
+1
 
 org/apache/hadoop/hbase/client/CoprocessorHConnection.java
 0
@@ -952,7 +972,7 @@
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
-100
+101
 
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
@@ -962,12 +982,12 @@
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
-25
+26
 
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
-5
+6
 
 org/apache/hadoop/hbase/client/HTableWrapper.java
 0
@@ -1007,7 +1027,7 @@
 org/apache/hadoop/hbase/client/MetricsConnection.java
 0
 0
-39
+40
 
 org/apache/hadoop/hbase/client/MultiAction.java
 0
@@ -1042,7 +1062,7 @@
 org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
 0
 0
-11
+12
 
 org/apache/hadoop/hbase/client/Put.java
 0
@@ -1087,7 +1107,7 @@
 org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
 0
 0
-1
+2
 
 org/apache/hadoop/hbase/client/RegionReplicaUtil.java
 0
@@ -1167,7 +1187,7 @@
 org/apache/hadoop/hbase/client/RowAccess.java
 0
 0
-2
+3
 
 org/apache/hadoop/hbase/client/RowMutations.java
 0
@@ -1207,64 +1227,64 @@
 org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
 0
 0
-4
+5
 
 org/apache/hadoop/hbase/client/SecureBulkLoadClient.java
 0
 0
 17
 
+org/apache/hadoop/hbase/client/ServerStatisticTracker.java
+0
+0
+1
+
 org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/SimpleRequestController.java
 0
 0
-8
-
+9
+
 org/apache/hadoop/hbase/client/Table.java
 0
 0
 18
-
+
 org/apache/hadoop/hbase/client/TableDescriptor.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
 0
 0
 43
-
+
 org/apache/hadoop/hbase/client/TableSnapshotScanner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TableState.java
 0
 0
 35
-
+
 org/apache/hadoop/hbase/client/VersionInfoUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ZooKeeperRegistry.java
 0
 0
 6
-
-org/apache/hadoop/hbase/client/backoff/Cl

[37/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html 
b/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
index be80b7f..6470278 100644
--- a/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
+++ b/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
@@ -250,7 +250,7 @@ implements RegionObserver
-  [generated list of method links inherited from interface RegionObserver: postAppend, postBatchMutate, postBatchMutateIndispensably, ..., preStoreScannerOpen, preWALRestore]
+  [the same generated RegionObserver inherited-method link list, re-wrapped by the site regeneration]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
index f4db034..ee25eee 100644
--- a/devapidocs/org/a

[22/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlLists.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlLists.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlLists.html
index cba8014..a95a076 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlLists.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlLists.html
@@ -272,12 +272,12 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  Cell cell) 
 
 
-(package private) static 
com.google.common.collect.ListMultimap<String,TablePermission>
+(package private) static 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap<String,TablePermission>
 getNamespacePermissions(org.apache.hadoop.conf.Configuration conf,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String namespace) 
 
 
-(package private) static 
com.google.common.collect.ListMultimap<String,TablePermission>
+(package private) static 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap<String,TablePermission>
 getPermissions(org.apache.hadoop.conf.Configuration conf,
   byte[] entryName,
   Table t)
@@ -286,7 +286,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-static com.google.common.collect.ListMultimap<String,TablePermission>
+static 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap<String,TablePermission>
 getTablePermissions(org.apache.hadoop.conf.Configuration conf,
TableName tableName) 
 
@@ -330,14 +330,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 isNamespaceEntry(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String entryName) 
 
 
-(package private) static Map<byte[], ListMultimap<String,TablePermission>>
+(package private) static Map<byte[], ListMultimap<String,TablePermission>>
 loadAll(org.apache.hadoop.conf.Configuration conf)
 Load all permissions from the region server holding 
_acl_,
  primarily intended for testing purposes.
 
 
 
-(package private) static Map<byte[], ListMultimap<String,TablePermission>>
+(package private) static Map<byte[], ListMultimap<String,TablePermission>>
 loadAll(Region aclRegion)
 Loads all of the permission grants stored in a region of 
the _acl_
  table.
@@ -349,12 +349,12 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  Cell kv) 
 
 
-private static 
com.google.common.collect.ListMultimap<String,TablePermission>
+private static 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap

[07/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
index 0c07a2f..c90d203 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.AckHandler.html
@@ -34,553 +34,554 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import 
com.google.common.annotations.VisibleForTesting;
-030
-031import io.netty.buffer.ByteBuf;
-032import 
io.netty.buffer.ByteBufAllocator;
-033import io.netty.channel.Channel;
-034import 
io.netty.channel.ChannelHandler.Sharable;
-035import 
io.netty.channel.ChannelHandlerContext;
-036import io.netty.channel.EventLoop;
-037import 
io.netty.channel.SimpleChannelInboundHandler;
-038import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-039import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-040import 
io.netty.handler.timeout.IdleStateEvent;
-041import 
io.netty.handler.timeout.IdleStateHandler;
-042import io.netty.util.concurrent.Future;
-043import 
io.netty.util.concurrent.Promise;
-044import 
io.netty.util.concurrent.PromiseCombiner;
-045
-046import java.io.IOException;
-047import java.nio.ByteBuffer;
-048import java.util.ArrayDeque;
-049import java.util.Collection;
-050import java.util.Collections;
-051import java.util.Deque;
-052import java.util.IdentityHashMap;
-053import java.util.List;
-054import java.util.Set;
-055import 
java.util.concurrent.CompletableFuture;
-056import java.util.concurrent.TimeUnit;
-057import java.util.function.Supplier;
-058
-059import 
org.apache.hadoop.conf.Configuration;
-060import 
org.apache.hadoop.crypto.Encryptor;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-063import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-064import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-065import 
org.apache.hadoop.hbase.util.FSUtils;
-066import 
org.apache.hadoop.hdfs.DFSClient;
-067import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-068import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-069import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-070import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-071import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-072import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-073import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-074import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-075import 
org.apache.hadoop.util.DataChecksum;
-076
-077/**
-078 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-079 * supports writing file with only one 
block.
-080 * 
-081 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly
-082 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the
-083 * method. And we place it here under util package because we want to make it independent of WAL
-084 * implementation thus easier to move it to HDFS project finally.
-085 * 
-086 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only
-087 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and
-088 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside
-089 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)},
-090 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop},
-091 * there will be an extra context-switch.
-092 * 
-093 * Advantages compare to DFSOutputStream:
-094 * 
-095 *   1. The fan out mechanism. This will reduce the latency.
-096 *   2. The asynchronous WAL could also run in the same EventLoop, we could just call write and flush
-097 *      inside the EventLoop thread, so generally we only have one thread to do all the things.
-098 *   3. Fail-fast when connection to datanode error. The WAL implementation could open new writer
-099 *      ASAP.
-100 *   4. We could benefit from netty's ByteBuf management mechanism.
-101 * 
-102 */
-103@InterfaceAudience.Private
-104public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput {
-105
-106  // The MAX_PACKET_SIZE is 16MB but it include t
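For readers skimming the class comment above, a minimal usage sketch in Java. It is hedged:
the output is assumed to have been obtained from FanOutOneBlockAsyncDFSOutputHelper.createOutput(...)
(whose parameters are not shown in this excerpt), and flush(boolean) is assumed to return a
CompletableFuture<Long>, as the CompletableFuture import above suggests; treat it as an
illustration, not the exact upstream API.

    import java.util.concurrent.CompletableFuture;

    // Hypothetical helper method: buffer a payload and sync it to the datanodes,
    // waiting outside the EventLoop as the class comment above warns.
    static void writeAndSync(FanOutOneBlockAsyncDFSOutput out, byte[] payload) throws Exception {
      out.write(payload);                                // buffers the bytes client-side
      CompletableFuture<Long> acked = out.flush(true);   // assumed: sync to all datanodes
      long ackedLength = acked.get();                    // blocks, so never call it inside the EventLoop
      System.out.println("acked bytes: " + ackedLength);
    }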

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html 
b/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
index 16c16fc..949cd54 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
@@ -187,7 +187,7 @@ extends 
 static StartcodeAgnosticServerName
-valueOf(com.google.common.net.HostAndPort hostnameAndPort,
+valueOf(org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort hostnameAndPort,
long startcode) 
 
 
@@ -266,13 +266,13 @@ extends 
+
 
 
 
 
 valueOf
-public static StartcodeAgnosticServerName valueOf(com.google.common.net.HostAndPort hostnameAndPort,
+public static StartcodeAgnosticServerName valueOf(org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort hostnameAndPort,
   long startcode)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
 
b/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
index fd0601d..e18633b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
@@ -104,7 +104,7 @@
 
 
 static StartcodeAgnosticServerName
-StartcodeAgnosticServerName.valueOf(com.google.common.net.HostAndPort hostnameAndPort,
+StartcodeAgnosticServerName.valueOf(org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort hostnameAndPort,
long startcode) 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html 
b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index 4ef40ce..705e2b3 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -319,7 +319,7 @@ the order they are declared.
 
 
 values
-public static CompareFilter.CompareOp[] values()
+public static CompareFilter.CompareOp[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -339,7 +339,7 @@ for (CompareFilter.CompareOp c : 
CompareFilter.CompareOp.values())
 
 
 valueOf
-public static CompareFilter.CompareOp valueOf(String name)
+public static CompareFilter.CompareOp valueOf(String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 20ffc60..a6e9a0d 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -175,14 +175,14 @@
 
java.lang.Enum (implements java.lang.Comparable, java.io.Serializable)
 
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
 org.apache.hadoop.hbase.filter.FilterList.Operator
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.Or

[20/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.VisibilityReplication.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.VisibilityReplication.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.VisibilityReplication.html
index 5dd7acd..c387500 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.VisibilityReplication.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.VisibilityReplication.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class VisibilityController.VisibilityReplication
+public static class VisibilityController.VisibilityReplication
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements RegionServerObserver
 A RegionServerObserver impl that provides the custom
@@ -255,7 +255,7 @@ implements 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -264,7 +264,7 @@ implements 
 
 visibilityLabelService
-private VisibilityLabelService visibilityLabelService
+private VisibilityLabelService visibilityLabelService
 
 
 
@@ -281,7 +281,7 @@ implements 
 
 VisibilityReplication
-public VisibilityReplication()
+public VisibilityReplication()
 
 
 
@@ -298,7 +298,7 @@ implements 
 
 start
-public void start(CoprocessorEnvironment env)
+public void start(CoprocessorEnvironment env)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -314,7 +314,7 @@ implements 
 
 stop
-public void stop(CoprocessorEnvironment env)
+public void stop(CoprocessorEnvironment env)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -330,7 +330,7 @@ implements 
 
 postCreateReplicationEndPoint
-public ReplicationEndpoint postCreateReplicationEndPoint(ObserverContext ctx,
+public ReplicationEndpoint postCreateReplicationEndPoint(ObserverContext ctx,
  ReplicationEndpoint endpoint)
 Description copied from 
interface: RegionServerObserver
 This will be called after the replication endpoint is 
instantiated.
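As a hedged illustration of this hook (the environment generic and the observer setup are assumptions
drawn from the coprocessor API of this era, the other RegionServerObserver callbacks are omitted, and
older branches would extend BaseRegionServerObserver instead), a coprocessor that wraps the freshly
created endpoint might look like:

    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
    import org.apache.hadoop.hbase.replication.ReplicationEndpoint;

    public class EndpointWrappingObserver implements RegionServerObserver {
      @Override
      public ReplicationEndpoint postCreateReplicationEndPoint(
          ObserverContext<RegionServerCoprocessorEnvironment> ctx, ReplicationEndpoint endpoint) {
        // Called after the endpoint is instantiated: return it unchanged,
        // or hand back a wrapping/replacement endpoint (as VisibilityReplication does).
        return endpoint;
      }
    }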



[13/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
index bcb9b8e..310709a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
@@ -50,10 +50,10 @@
 042import 
org.apache.hadoop.hbase.util.HFileArchiveUtil;
 043import 
org.apache.hadoop.io.MultipleIOException;
 044
-045import com.google.common.base.Function;
-046import 
com.google.common.base.Preconditions;
-047import 
com.google.common.collect.Collections2;
-048import com.google.common.collect.Lists;
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
+046import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+047import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2;
+048import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 049
 050/**
 051 * Utility class to handle the removal of 
HFiles (or the respective {@link StoreFile StoreFiles})

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
index bcb9b8e..310709a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
@@ -50,10 +50,10 @@
 042import 
org.apache.hadoop.hbase.util.HFileArchiveUtil;
 043import 
org.apache.hadoop.io.MultipleIOException;
 044
-045import com.google.common.base.Function;
-046import 
com.google.common.base.Preconditions;
-047import 
com.google.common.collect.Collections2;
-048import com.google.common.collect.Lists;
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
+046import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+047import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2;
+048import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 049
 050/**
 051 * Utility class to handle the removal of 
HFiles (or the respective {@link StoreFile StoreFiles})

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
index bcb9b8e..310709a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
@@ -50,10 +50,10 @@
 042import 
org.apache.hadoop.hbase.util.HFileArchiveUtil;
 043import 
org.apache.hadoop.io.MultipleIOException;
 044
-045import com.google.common.base.Function;
-046import 
com.google.common.base.Preconditions;
-047import 
com.google.common.collect.Collections2;
-048import com.google.common.collect.Lists;
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
+046import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+047import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2;
+048import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 049
 050/**
 051 * Utility class to handle the removal of 
HFiles (or the respective {@link StoreFile StoreFiles})

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
index bcb9b8e..310709a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
@@ -50,10 +50,10 @@
 042import 
org.apache.hadoop.hbase.util.HFileArchiveUtil;
 043import 
org.apache.hadoop.io.MultipleIOException;
 044
-045import com.google.common.base.Function;
-046import 
com.google.common.b

hbase git commit: HBASE-18430 fixed typo

Repository: hbase
Updated Branches:
  refs/heads/master 31c3edaa2 -> 70a357dc5


HBASE-18430 fixed typo

Signed-off-by: Misty Stanley-Jones 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70a357dc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70a357dc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70a357dc

Branch: refs/heads/master
Commit: 70a357dc5cc74ae6a354c907959f644f563aeee4
Parents: 31c3eda
Author: coral 
Authored: Fri Jul 21 14:51:00 2017 -0500
Committer: Misty Stanley-Jones 
Committed: Fri Jul 21 13:46:38 2017 -0700

--
 .../asciidoc/_chapters/appendix_contributing_to_documentation.adoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/70a357dc/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
--
diff --git 
a/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc 
b/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
index 0d68dce..0337182 100644
--- a/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
+++ b/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
@@ -55,7 +55,7 @@ see <>.
 If you spot an error in a string in a UI, utility, script, log message, or 
elsewhere,
 or you think something could be made more clear, or you think text needs to be 
added
 where it doesn't currently exist, the first step is to file a JIRA. Be sure to 
set
-the component to `Documentation` in addition any other involved components. 
Most
+the component to `Documentation` in addition to any other involved components. 
Most
 components have one or more default owners, who monitor new issues which come 
into
 those queues. Regardless of whether you feel able to fix the bug, you should 
still
 file bugs where you see them.



hbase git commit: HBASE-18389 Remove byte[] from formal parameter of sizeOf() of ClassSize, ClassSize.MemoryLayout and ClassSize.UnsafeLayout

Repository: hbase
Updated Branches:
  refs/heads/branch-2 d5c6e1101 -> 946289113


HBASE-18389 Remove byte[] from formal parameter of sizeOf() of ClassSize, 
ClassSize.MemoryLayout and ClassSize.UnsafeLayout

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/94628911
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/94628911
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/94628911

Branch: refs/heads/branch-2
Commit: 946289113a1a867a6d15ba2c147b2f05b28fc806
Parents: d5c6e11
Author: Xiang Li 
Authored: Mon Jul 17 17:11:16 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Jul 22 02:16:09 2017 +0800

--
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  8 ++---
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  4 +--
 .../org/apache/hadoop/hbase/util/ClassSize.java | 34 +---
 3 files changed, 36 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/94628911/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 56de21b..1146de4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -564,7 +564,7 @@ public final class CellUtil {
 public long heapSize() {
   long sum = HEAP_SIZE_OVERHEAD + CellUtil.estimatedHeapSizeOf(cell);
   if (this.tags != null) {
-sum += ClassSize.sizeOf(this.tags, this.tags.length);
+sum += ClassSize.sizeOf(this.tags);
   }
   return sum;
 }
@@ -763,7 +763,7 @@ public final class CellUtil {
   long sum = HEAP_SIZE_OVERHEAD + CellUtil.estimatedHeapSizeOf(cell);
   // this.tags is on heap byte[]
   if (this.tags != null) {
-sum += ClassSize.sizeOf(this.tags, this.tags.length);
+sum += ClassSize.sizeOf(this.tags);
   }
   return sum;
 }
@@ -889,7 +889,7 @@ public final class CellUtil {
 public long heapSize() {
   long sum = ClassSize.REFERENCE + super.heapSize();
   if (this.value != null) {
-sum += ClassSize.sizeOf(this.value, this.value.length);
+sum += ClassSize.sizeOf(this.value);
   }
   return sum;
 }
@@ -989,7 +989,7 @@ public final class CellUtil {
 public long heapSize() {
   long sum = ClassSize.REFERENCE + super.heapSize();
   if (this.value != null) {
-sum += ClassSize.sizeOf(this.value, this.value.length);
+sum += ClassSize.sizeOf(this.value);
   }
   return sum;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/94628911/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 9b9dc43..98cf9cb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -2608,8 +2608,8 @@ public class KeyValue implements ExtendedCell {
  */
 return ClassSize.align(sum) +
 (offset == 0
-  ? ClassSize.sizeOf(bytes, length) // count both length and object 
overhead
-  : length);// only count the number of bytes
+  ? ClassSize.sizeOfByteArray(length)  // count both length and object 
overhead
+  : length);   // only count the number of 
bytes
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/94628911/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index 000e99a..c31ee83 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -159,7 +159,7 @@ public class ClassSize {
   return  ((num + 7) >> 3) << 3;
 }
 
-long sizeOf(byte[] b, int len) {
+long sizeOfByteArray(int len) {
   return align(arrayHeaderSize() + len);
 }
   }
@@ -204,7 +204,7 @@ public class ClassSize {
 
 @Override
 @SuppressWarnings("static-access")
-long sizeOf(byte[] b, int len) {
+long sizeOfByteArray(int len) {
   return align(arrayHeaderSize() + len * 
UnsafeAccess.theUnsafe.ARRAY_BYTE_INDEX_SCALE);
 }
   }
@@ -444,8 +444
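As a hedged sketch of what this refactoring means at a call site (the array here is illustrative only;
the one-argument ClassSize.sizeOf(byte[]) and the package-level sizeOfByteArray(int) are the forms the
hunks above introduce):

    byte[] tags = new byte[] {1, 2, 3};
    // Before: the length was passed redundantly alongside the array itself.
    //   long heap = ClassSize.sizeOf(tags, tags.length);
    // After: the public helper takes just the array,
    long heap = ClassSize.sizeOf(tags);
    // while the MemoryLayout/UnsafeLayout method works from a length alone:
    //   long aligned = sizeOfByteArray(tags.length);   // package-private inside ClassSize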

hbase git commit: HBASE-18389 Remove byte[] from formal parameter of sizeOf() of ClassSize, ClassSize.MemoryLayout and ClassSize.UnsafeLayout

Repository: hbase
Updated Branches:
  refs/heads/master 2a0d18928 -> 31c3edaa2


HBASE-18389 Remove byte[] from formal parameter of sizeOf() of ClassSize, 
ClassSize.MemoryLayout and ClassSize.UnsafeLayout

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/31c3edaa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/31c3edaa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/31c3edaa

Branch: refs/heads/master
Commit: 31c3edaa29776b63cc69ecd37306ad10836992eb
Parents: 2a0d189
Author: Xiang Li 
Authored: Mon Jul 17 17:11:16 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Jul 22 02:12:59 2017 +0800

--
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  8 ++---
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  4 +--
 .../org/apache/hadoop/hbase/util/ClassSize.java | 34 +---
 3 files changed, 36 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/31c3edaa/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 56de21b..1146de4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -564,7 +564,7 @@ public final class CellUtil {
 public long heapSize() {
   long sum = HEAP_SIZE_OVERHEAD + CellUtil.estimatedHeapSizeOf(cell);
   if (this.tags != null) {
-sum += ClassSize.sizeOf(this.tags, this.tags.length);
+sum += ClassSize.sizeOf(this.tags);
   }
   return sum;
 }
@@ -763,7 +763,7 @@ public final class CellUtil {
   long sum = HEAP_SIZE_OVERHEAD + CellUtil.estimatedHeapSizeOf(cell);
   // this.tags is on heap byte[]
   if (this.tags != null) {
-sum += ClassSize.sizeOf(this.tags, this.tags.length);
+sum += ClassSize.sizeOf(this.tags);
   }
   return sum;
 }
@@ -889,7 +889,7 @@ public final class CellUtil {
 public long heapSize() {
   long sum = ClassSize.REFERENCE + super.heapSize();
   if (this.value != null) {
-sum += ClassSize.sizeOf(this.value, this.value.length);
+sum += ClassSize.sizeOf(this.value);
   }
   return sum;
 }
@@ -989,7 +989,7 @@ public final class CellUtil {
 public long heapSize() {
   long sum = ClassSize.REFERENCE + super.heapSize();
   if (this.value != null) {
-sum += ClassSize.sizeOf(this.value, this.value.length);
+sum += ClassSize.sizeOf(this.value);
   }
   return sum;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/31c3edaa/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 9b9dc43..98cf9cb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -2608,8 +2608,8 @@ public class KeyValue implements ExtendedCell {
  */
 return ClassSize.align(sum) +
 (offset == 0
-  ? ClassSize.sizeOf(bytes, length) // count both length and object 
overhead
-  : length);// only count the number of bytes
+  ? ClassSize.sizeOfByteArray(length)  // count both length and object 
overhead
+  : length);   // only count the number of 
bytes
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/31c3edaa/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index 1b19c92..3ab59e7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -168,7 +168,7 @@ public class ClassSize {
   return  ((num + 7) >> 3) << 3;
 }
 
-long sizeOf(byte[] b, int len) {
+long sizeOfByteArray(int len) {
   return align(arrayHeaderSize() + len);
 }
   }
@@ -213,7 +213,7 @@ public class ClassSize {
 
 @Override
 @SuppressWarnings("static-access")
-long sizeOf(byte[] b, int len) {
+long sizeOfByteArray(int len) {
   return align(arrayHeaderSize() + len * 
UnsafeAccess.theUnsafe.ARRAY_BYTE_INDEX_SCALE);
 }
   }
@@ -463,8 +463,34 

hbase git commit: HBASE-18404 fixed typo in acid semantics

Repository: hbase
Updated Branches:
  refs/heads/master 890d92a90 -> 2a0d18928


HBASE-18404 fixed typo in acid semantics

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2a0d1892
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2a0d1892
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2a0d1892

Branch: refs/heads/master
Commit: 2a0d18928e372b3e976c9c89457390b7afc0aafc
Parents: 890d92a
Author: coral 
Authored: Thu Jul 20 14:31:59 2017 -0500
Committer: Sean Busbey 
Committed: Fri Jul 21 12:26:48 2017 -0500

--
 src/main/site/xdoc/acid-semantics.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2a0d1892/src/main/site/xdoc/acid-semantics.xml
--
diff --git a/src/main/site/xdoc/acid-semantics.xml 
b/src/main/site/xdoc/acid-semantics.xml
index 2696df7..2d4eb6a 100644
--- a/src/main/site/xdoc/acid-semantics.xml
+++ b/src/main/site/xdoc/acid-semantics.xml
@@ -93,7 +93,7 @@ under the License.
   
 
 
-  All mutations are atomic within a row. Any put will either 
wholely succeed or wholely fail.[3]
+  All mutations are atomic within a row. Any put will either 
wholly succeed or wholly fail.[3]
   
 An operation that returns a "success" code has 
completely succeeded.
 An operation that returns a "failure" code has 
completely failed.
@@ -225,7 +225,7 @@ under the License.
   log. This does not actually imply an fsync() to magnetic media, but 
rather just that the data has been
   written to the OS cache on all replicas of the log. In the case of a 
full datacenter power loss, it is
   possible that the edits are not truly durable.
-  [3] Puts will either wholely succeed or wholely fail, provided that 
they are actually sent
+  [3] Puts will either wholly succeed or wholly fail, provided that 
they are actually sent
   to the RegionServer.  If the writebuffer is used, Puts will not be sent 
until the writebuffer is filled
   or it is explicitly flushed.
   



[1/8] hbase git commit: HBASE-17908 Upgrade guava

Repository: hbase
Updated Branches:
  refs/heads/branch-2 f8fa04c71 -> d5c6e1101


http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
index 1e2235a..9d56d60 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
@@ -43,9 +43,9 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.rules.TestName;
 
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Multimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
 
 @InterfaceAudience.Private
 public class SpaceQuotaHelperForTests {

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
index 4a7258f..16bb7dd 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.hbase.quotas;
 
-import static com.google.common.collect.Iterables.size;
+import static 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables.size;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.mockito.Matchers.any;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
index 52921af..b9f54ad 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
@@ -44,7 +44,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.collect.Iterables;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
index da294c6..cbdb8ec 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
@@ -32,7 +32,7 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.collect.Iterables;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
 
 /**
  * Non-HBase cluster unit tests for {@link QuotaObserverChore}.

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
index dde9e71..b94333d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
@@ -53,8 +53,8 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
-i

[8/8] hbase git commit: HBASE-17908 Upgrade guava

HBASE-17908 Upgrade guava

Pull in guava 22.0 by using the shaded version from the new hbase-thirdparty
project.

In poms, exclude guava everywhere except on hadoop-common. Do this so
we minimize transitive includes. hadoop-common is needed because hadoop
Configuration uses guava for preconditions.

Everywhere we used guava, instead use shaded so fix a load of imports.

The Stopwatch API changed, as did hashing and toStringHelper, which now lives
in the MoreObjects class. Otherwise, minimal changes were needed to come up on 22.0.
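A hedged before/after sketch of the import and API moves described above (the class and method names
are the standard Guava 22 API, the shaded package prefix is the one used throughout the diffs below,
and the surrounding class is hypothetical):

    // Before, against unshaded Guava:
    //   import com.google.common.base.Objects;
    //   import com.google.common.base.Stopwatch;
    //   ... Objects.toStringHelper(this) ...  and  ... new Stopwatch().start() ...

    // After, against the shaded Guava 22 from hbase-thirdparty:
    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
    import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;

    class GuavaUsageSketch {
      String describe(int n) {
        // toStringHelper moved from Objects to MoreObjects
        return MoreObjects.toStringHelper(this).add("n", n).toString();
      }

      long elapsedMillis() {
        // the old new Stopwatch().start() pattern becomes a static factory
        Stopwatch watch = Stopwatch.createStarted();
        return watch.elapsed(TimeUnit.MILLISECONDS);
      }
    }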


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d5c6e110
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d5c6e110
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d5c6e110

Branch: refs/heads/branch-2
Commit: d5c6e1101614b806eafd32da07eeeb16c299c1a1
Parents: f8fa04c
Author: Michael Stack 
Authored: Thu Jul 6 22:43:46 2017 -0700
Committer: Michael Stack 
Committed: Fri Jul 21 15:41:52 2017 +0100

--
 hbase-assembly/pom.xml  |  6 ++
 hbase-client/pom.xml| 15 +++-
 .../apache/hadoop/hbase/MetaTableAccessor.java  |  2 +-
 .../hbase/client/AsyncConnectionImpl.java   |  2 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|  6 ++
 .../hadoop/hbase/client/AsyncProcess.java   |  2 +-
 .../hbase/client/AsyncRequestFutureImpl.java|  2 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |  6 +-
 .../AsyncScanSingleRegionRpcRetryingCaller.java |  2 +-
 .../hadoop/hbase/client/AsyncTableBase.java |  2 +-
 .../hbase/client/AsyncTableResultScanner.java   |  4 +-
 .../hbase/client/BufferedMutatorImpl.java   |  2 +-
 .../client/ClientAsyncPrefetchScanner.java  |  2 +-
 .../hadoop/hbase/client/ClientScanner.java  |  2 +-
 .../client/ColumnFamilyDescriptorBuilder.java   |  2 +-
 .../hbase/client/ConnectionConfiguration.java   |  2 +-
 .../hbase/client/ConnectionImplementation.java  |  2 +-
 .../hadoop/hbase/client/ConnectionUtils.java|  4 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  2 +-
 .../hadoop/hbase/client/HRegionLocator.java |  2 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |  4 +-
 .../hadoop/hbase/client/HTableMultiplexer.java  |  4 +-
 .../hadoop/hbase/client/MetricsConnection.java  |  2 +-
 .../hbase/client/MultiServerCallable.java   |  2 +-
 .../apache/hadoop/hbase/client/Mutation.java| 10 +--
 .../client/PreemptiveFastFailInterceptor.java   |  2 +-
 .../org/apache/hadoop/hbase/client/Query.java   |  6 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  2 +-
 .../client/RegionCoprocessorRpcChannel.java |  2 +-
 .../client/RegionCoprocessorServiceExec.java|  2 +-
 .../apache/hadoop/hbase/client/RowAccess.java   |  2 +-
 .../client/ScannerCallableWithReplicas.java |  2 +-
 .../hbase/client/ServerStatisticTracker.java|  2 +-
 .../hbase/client/SimpleRequestController.java   |  2 +-
 .../backoff/ExponentialClientBackoffPolicy.java |  2 +-
 .../client/metrics/ServerSideScanMetrics.java   |  2 +-
 .../client/replication/ReplicationAdmin.java|  4 +-
 .../replication/ReplicationSerDeHelper.java |  2 +-
 .../hbase/filter/ColumnCountGetFilter.java  |  2 +-
 .../hbase/filter/ColumnPaginationFilter.java|  2 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.java |  2 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.java  |  2 +-
 .../hadoop/hbase/filter/CompareFilter.java  |  2 +-
 .../hbase/filter/DependentColumnFilter.java |  2 +-
 .../hadoop/hbase/filter/FirstKeyOnlyFilter.java |  2 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.java |  2 +-
 .../hbase/filter/InclusiveStopFilter.java   |  2 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.java  |  2 +-
 .../apache/hadoop/hbase/filter/PageFilter.java  |  2 +-
 .../hadoop/hbase/filter/PrefixFilter.java   |  2 +-
 .../hbase/filter/SingleColumnValueFilter.java   |  2 +-
 .../hadoop/hbase/filter/TimestampsFilter.java   |  2 +-
 .../hadoop/hbase/ipc/AbstractRpcClient.java | 10 +--
 .../hadoop/hbase/ipc/BlockingRpcClient.java |  2 +-
 .../hadoop/hbase/ipc/CoprocessorRpcUtils.java   |  2 +-
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java|  2 +-
 .../hbase/ipc/NettyRpcClientConfigHelper.java   |  2 +-
 .../hadoop/hbase/ipc/RpcClientFactory.java  |  4 +-
 .../ReplicationQueuesClientZKImpl.java  |  4 +-
 .../replication/ReplicationStateZKBase.java |  2 +-
 .../hbase/replication/ReplicationTableBase.java |  2 +-
 .../security/AbstractHBaseSaslRpcClient.java|  2 +-
 .../security/access/AccessControlUtil.java  |  8 +-
 .../hbase/security/access/Permission.java   |  2 +-
 .../access/ShadedAccessControlUtil.java |  4 +-
 .../hbase/zookeeper/MetricsZooKeeper.java   |  2 +-
 .../hadoop/hbase/zookeeper/ZNodePaths.java  |  2 +-
 .../hbase/TestInterfaceAudienceAnnotations.java | 10 +--
 .../hbase/client/TestClientNoCluster.java   |

[7/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index 2acddda..55895f2 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.ipc;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
index d27602e..ab1a801 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.common.annotations.VisibleForTesting;
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 import java.io.IOException;
 import java.net.SocketAddress;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
index dc5f122..c4e9455 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
@@ -243,4 +243,4 @@ public final class CoprocessorRpcUtils {
 }
 return new DoNotRetryIOException(controller.errorText());
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index d2e0e90..6dab3b5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
index fe039d3..a8aca52 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 
 import io.netty.channel.Channel;
 import io.netty.channel.EventLoopGroup;

http://

[6/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
index 3e4016d..16093b5 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
@@ -27,7 +27,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Implementation of the Cormode, Korn, Muthukrishnan, and Srivastava algorithm

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-it/pom.xml
--
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 14c8e71..953dacc 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -215,6 +215,12 @@
 
   org.apache.hbase
   hbase-hadoop-compat
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hbase
@@ -226,8 +232,8 @@
   hbase-testing-util
 
 
-  com.google.guava
-  guava
+  org.apache.hbase.thirdparty
+  hbase-shaded-miscellaneous
 
 
   io.dropwizard.metrics
@@ -295,12 +301,24 @@
 
   org.apache.hadoop
   hadoop-mapreduce-client-core
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-mapreduce-client-jobclient
   test-jar
   test
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
index ca8e277..e1c17a4 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
index 8596489..f041f72 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
@@ -46,14 +46,15 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.base.Objects;
-import com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
 /**
  * An integration test to detect regressions in HBASE-7912. Create
@@ -297,7 +298,8 @@ public class IntegrationTestBackupRestore extends 
IntegrationTestBase {
 rowsInBatch =
 Integer.parseInt(cmd.getOptionValue(NB_ROWS_IN_BATCH_KEY,
   Integer.toString(DEFAULT_NB_ROWS_IN_BATCH)));
-LOG.info(Objects.toStringHelper("Parsed Options").add(REGION_COUNT_KEY, regionsCountPerServer)
+LOG.info(MoreObjects.toStringHelper("Parsed Options").
+  add(REGION_COUNT_KEY, regionsCountPerServer)
 .add(REGIONSERVER_COUNT_KEY, regionServerCount).add(NB_ROWS_

[2/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 8abef2b..cabbdb4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -100,8 +100,8 @@ import org.apache.htrace.Trace;
 import org.apache.htrace.TraceScope;
 import org.apache.htrace.impl.ProbabilitySampler;
 
-import com.google.common.base.Objects;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.UniformReservoir;
 
@@ -333,7 +333,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   || (!isReadCmd && desc != null && desc.getRegionReplication() != opts.replicas)) {
   needsDelete = true;
   // wait, why did it delete my table?!?
-  LOG.debug(Objects.toStringHelper("needsDelete")
+  LOG.debug(MoreObjects.toStringHelper("needsDelete")
 .add("needsDelete", needsDelete)
 .add("isReadCmd", isReadCmd)
 .add("exists", exists)

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
index 24e9590..e669f14 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
@@ -47,7 +47,7 @@ import 
org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
-import com.google.common.base.Stopwatch;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
 
 /**
  * A simple performance evaluation tool for single client and MR scans
@@ -102,8 +102,8 @@ public class ScanPerformanceEvaluation extends 
AbstractHBaseTool {
 FileSystem fs = filename.getFileSystem(getConf());
 
 // read the file from start to finish
-Stopwatch fileOpenTimer = new Stopwatch();
-Stopwatch streamTimer = new Stopwatch();
+Stopwatch fileOpenTimer = Stopwatch.createUnstarted();
+Stopwatch streamTimer = Stopwatch.createUnstarted();
 
 fileOpenTimer.start();
 FSDataInputStream in = fs.open(filename);
@@ -120,11 +120,12 @@ public class ScanPerformanceEvaluation extends 
AbstractHBaseTool {
 }
 streamTimer.stop();
 
-double throughput = (double)totalBytes / streamTimer.elapsedTime(TimeUnit.SECONDS);
+double throughput = (double)totalBytes / streamTimer.elapsed(TimeUnit.SECONDS);
 
 System.out.println("HDFS streaming: ");
-System.out.println("total time to open: " + fileOpenTimer.elapsedMillis() 
+ " ms");
-System.out.println("total time to read: " + streamTimer.elapsedMillis() + 
" ms");
+System.out.println("total time to open: " +
+  fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
+System.out.println("total time to read: " + 
streamTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
 System.out.println("total bytes: " + totalBytes + " bytes ("
 + StringUtils.humanReadableInt(totalBytes) + ")");
 System.out.println("throghput  : " + 
StringUtils.humanReadableInt((long)throughput) + "B/s");
@@ -143,9 +144,9 @@ public class ScanPerformanceEvaluation extends 
AbstractHBaseTool {
   }
 
   public void testScan() throws IOException {
-Stopwatch tableOpenTimer = new Stopwatch();
-Stopwatch scanOpenTimer = new Stopwatch();
-Stopwatch scanTimer = new Stopwatch();
+Stopwatch tableOpenTimer = Stopwatch.createUnstarted();
+Stopwatch scanOpenTimer = Stopwatch.createUnstarted();
+Stopwatch scanTimer = Stopwatch.createUnstarted();
 
 tableOpenTimer.start();
 Connection connection = ConnectionFactory.createConnection(getConf());
@@ -176,14 +177,17 @@ public class ScanPerformanceEvaluation extends 
AbstractHBaseTool {
 
 ScanMetrics metrics = scan.getScanMetrics();
 long totalBytes = metrics.countOfBytesInResults.get();
-double throughput = (double)totalBytes / scanTimer.elapsedTime(TimeUnit.SECONDS);
-double throughputRows = (double)numRows / scanTimer.elapsedTime(TimeUnit.SECONDS);
-double throughputCells = (double)numCells / scanTimer.elapsedTim

[3/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 1fcb314..969d485 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -51,7 +51,7 @@ import 
org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Scanner scans both the memstore and the Store. Coalesce KeyValue stream 
into List

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
index 1e78ab2..2e17e18 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
@@ -35,7 +35,7 @@ import 
org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
 import org.apache.hadoop.hbase.security.User;
 
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 
 /**
  * The storage engine that implements the stripe-based store/compaction scheme.

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
index 18a6eec..1536774 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
@@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ConcatenatedLists;
 import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 
-import com.google.common.collect.ImmutableCollection;
-import com.google.common.collect.ImmutableList;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
 
 /**
  * Stripe implementation of StoreFileManager.

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
index aa7024b..a26a11f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Stripe implementation of StoreFlusher. Flushes files either into L0 file 
w/o metadata, or

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
index 716a820..340b780 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
+++ 
b/hbase-s

[4/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index fda9f60..785f6d5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -55,11 +55,11 @@ import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.T
 import org.apache.hadoop.hbase.security.access.AccessControlLists;
 import org.apache.hadoop.util.StringUtils;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Joiner;
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
  * The base class for load balancers. It provides the the functions used to by

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
index a8e22ce..30cf16a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
@@ -47,9 +47,9 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.util.Pair;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
  * An implementation of the {@link 
org.apache.hadoop.hbase.master.LoadBalancer} that

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
index 907e745..d9a1ab8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
@@ -43,15 +43,15 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Futures;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.MoreExecutors;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
  * This will find where data for a region is located in HDFS. It 

[5/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index a99c24f..367bb98 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -57,8 +57,8 @@ import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.junit.rules.TestName;
 
 public abstract class TestRSGroupsBase {

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
index 8b200ab..691f8d3 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.rsgroup;
 
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseCluster;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
index 2e89110..37cdd35 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.rsgroup;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5c6e110/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 7397573..6c5f77b 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -474,10 +474,6 @@
   metrics-core
 
 
-  com.google.guava
-  guava
-
-
   com.google.protobuf
   protobuf-java
 
@@ -567,6 +563,17 @@
   httpcore
   test
 
+
+  org.apache.commons
+  commons-crypto
+  ${commons-crypto.version}
+  
+
+  net.java.dev.jna
+  jna
+
+  
+
   
   
 
@@ -680,31 +687,67 @@
 
   org.apache.hadoop
   hadoop-client
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-mapreduce-client-core
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-mapreduce-client-jobclient
   test-jar
   test
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-hdfs
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-hdfs
   test-jar
   test
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-minicluster
   test
+  
+
+  com.google.guava
+  guava
+
+  
 
 
 
@@ -766,6 +809,12 @@
  

[3/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index 0d94378..a8773c0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 1fcb314..969d485 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -51,7 +51,7 @@ import 
org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Scanner scans both the memstore and the Store. Coalesce KeyValue stream 
into List

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
index 1e78ab2..2e17e18 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
@@ -35,7 +35,7 @@ import 
org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
 import org.apache.hadoop.hbase.security.User;
 
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 
 /**
  * The storage engine that implements the stripe-based store/compaction scheme.

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
index 18a6eec..1536774 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
@@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ConcatenatedLists;
 import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 
-import com.google.common.collect.ImmutableCollection;
-import com.google.common.collect.ImmutableList;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
 
 /**
  * Stripe implementation of StoreFileManager.

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
index aa7024b..a26a11f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
 import org.apache.hadoop.hbase.regionserv

[1/8] hbase git commit: HBASE-17908 Upgrade guava

Repository: hbase
Updated Branches:
  refs/heads/master 7941b83aa -> 890d92a90


http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
index e7e2b23..8b947ee 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
@@ -48,7 +48,7 @@ import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 import org.mockito.verification.VerificationMode;
 
-import com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
 /**
  * Test zookeeper-based, procedure controllers

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
index c31bfe5..bfd6a4b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
@@ -50,9 +50,9 @@ import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.rules.TestName;
 
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Multimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
 
 @InterfaceAudience.Private
 public class SpaceQuotaHelperForTests {

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
index 385f8c4..a673bcb 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java
@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.hbase.quotas;
 
-import static com.google.common.collect.Iterables.size;
+import static org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables.size;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.mockito.Matchers.any;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
index 52921af..b9f54ad 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
@@ -44,7 +44,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.collect.Iterables;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
index da294c6..cbdb8ec 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
@@ -32,7 +32,7 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.collec

[8/8] hbase git commit: HBASE-17908 Upgrade guava

HBASE-17908 Upgrade guava

Pull in guava 22.0 by using the shaded version up in new hbase-thirdparty 
project.

In poms, exclude guava everywhere except on hadoop-common. Do this so
we minimize transitive includes. hadoop-common is needed because hadoop's
Configuration uses guava's Preconditions.

Everywhere we used guava, instead use shaded so fix a load of imports.

The Stopwatch API changed, as did hashing, and toStringHelper now lives in
the MoreObjects class. Otherwise, only minimal changes were needed to come up on 22.0.
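
As a rough sketch of those two API moves (illustrative only, not code from this
commit; it assumes the relocated guava packages from hbase-shaded-miscellaneous
are on the classpath, and the class name below is made up):

import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;

public class Guava22MigrationSketch {
  public static void main(String[] args) throws InterruptedException {
    // Pre-22.0 code used "new Stopwatch()" and elapsedMillis(); both are gone.
    Stopwatch timer = Stopwatch.createUnstarted(); // or Stopwatch.createStarted()
    timer.start();
    Thread.sleep(10);
    timer.stop();
    long millis = timer.elapsed(TimeUnit.MILLISECONDS); // replaces elapsedMillis()

    // Pre-22.0 code used Objects.toStringHelper(); the helper now lives in MoreObjects.
    String summary = MoreObjects.toStringHelper("timing")
        .add("elapsedMs", millis)
        .toString();
    System.out.println(summary);
  }
}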


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/890d92a9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/890d92a9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/890d92a9

Branch: refs/heads/master
Commit: 890d92a90cb5ef8d17e6c3e4036c8077dea4dc86
Parents: 7941b83
Author: Michael Stack 
Authored: Thu Jul 6 22:43:46 2017 -0700
Committer: Michael Stack 
Committed: Fri Jul 21 15:28:08 2017 +0100

--
 hbase-assembly/pom.xml  |  6 ++
 hbase-client/pom.xml| 15 +++-
 .../apache/hadoop/hbase/MetaTableAccessor.java  |  2 +-
 .../hbase/client/AsyncConnectionImpl.java   |  2 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|  6 ++
 .../hadoop/hbase/client/AsyncProcess.java   |  2 +-
 .../hbase/client/AsyncRequestFutureImpl.java|  2 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |  6 +-
 .../AsyncScanSingleRegionRpcRetryingCaller.java |  2 +-
 .../hadoop/hbase/client/AsyncTableBase.java |  2 +-
 .../hbase/client/AsyncTableResultScanner.java   |  4 +-
 .../hbase/client/BufferedMutatorImpl.java   |  2 +-
 .../client/ClientAsyncPrefetchScanner.java  |  2 +-
 .../hadoop/hbase/client/ClientScanner.java  |  2 +-
 .../client/ColumnFamilyDescriptorBuilder.java   |  2 +-
 .../hbase/client/ConnectionConfiguration.java   |  2 +-
 .../hbase/client/ConnectionImplementation.java  |  2 +-
 .../hadoop/hbase/client/ConnectionUtils.java|  4 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  2 +-
 .../hadoop/hbase/client/HRegionLocator.java |  2 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |  4 +-
 .../hadoop/hbase/client/HTableMultiplexer.java  |  4 +-
 .../hadoop/hbase/client/MetricsConnection.java  |  2 +-
 .../hbase/client/MultiServerCallable.java   |  2 +-
 .../apache/hadoop/hbase/client/Mutation.java| 10 +--
 .../client/PreemptiveFastFailInterceptor.java   |  2 +-
 .../org/apache/hadoop/hbase/client/Query.java   |  6 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  2 +-
 .../client/RegionCoprocessorRpcChannel.java |  2 +-
 .../client/RegionCoprocessorServiceExec.java|  2 +-
 .../apache/hadoop/hbase/client/RowAccess.java   |  2 +-
 .../client/ScannerCallableWithReplicas.java |  2 +-
 .../hbase/client/ServerStatisticTracker.java|  2 +-
 .../hbase/client/SimpleRequestController.java   |  2 +-
 .../backoff/ExponentialClientBackoffPolicy.java |  2 +-
 .../client/metrics/ServerSideScanMetrics.java   |  2 +-
 .../client/replication/ReplicationAdmin.java|  4 +-
 .../replication/ReplicationSerDeHelper.java |  2 +-
 .../hbase/filter/ColumnCountGetFilter.java  |  2 +-
 .../hbase/filter/ColumnPaginationFilter.java|  2 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.java |  2 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.java  |  2 +-
 .../hadoop/hbase/filter/CompareFilter.java  |  2 +-
 .../hbase/filter/DependentColumnFilter.java |  2 +-
 .../hadoop/hbase/filter/FirstKeyOnlyFilter.java |  2 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.java |  2 +-
 .../hbase/filter/InclusiveStopFilter.java   |  2 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.java  |  2 +-
 .../apache/hadoop/hbase/filter/PageFilter.java  |  2 +-
 .../hadoop/hbase/filter/PrefixFilter.java   |  2 +-
 .../hbase/filter/SingleColumnValueFilter.java   |  2 +-
 .../hadoop/hbase/filter/TimestampsFilter.java   |  2 +-
 .../hadoop/hbase/ipc/AbstractRpcClient.java | 10 +--
 .../hadoop/hbase/ipc/BlockingRpcClient.java |  2 +-
 .../hadoop/hbase/ipc/CoprocessorRpcUtils.java   |  2 +-
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java|  2 +-
 .../hbase/ipc/NettyRpcClientConfigHelper.java   |  2 +-
 .../hadoop/hbase/ipc/RpcClientFactory.java  |  4 +-
 .../ReplicationQueuesClientZKImpl.java  |  4 +-
 .../replication/ReplicationStateZKBase.java |  2 +-
 .../hbase/replication/ReplicationTableBase.java |  2 +-
 .../security/AbstractHBaseSaslRpcClient.java|  2 +-
 .../security/access/AccessControlUtil.java  |  8 +-
 .../hbase/security/access/Permission.java   |  2 +-
 .../access/ShadedAccessControlUtil.java |  4 +-
 .../hbase/zookeeper/MetricsZooKeeper.java   |  2 +-
 .../hadoop/hbase/zookeeper/ZNodePaths.java  |  2 +-
 .../hbase/TestInterfaceAudienceAnnotations.java | 10 +--
 .../hbase/client/TestClientNoCluster.java   | 1

[5/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index a99c24f..367bb98 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -57,8 +57,8 @@ import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.junit.rules.TestName;
 
 public abstract class TestRSGroupsBase {

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
index 8b200ab..691f8d3 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.rsgroup;
 
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseCluster;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
index 2e89110..37cdd35 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.rsgroup;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index fb0a1ca..8c0ead2 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -474,10 +474,6 @@
   metrics-core
 
 
-  com.google.guava
-  guava
-
-
   com.google.protobuf
   protobuf-java
 
@@ -567,6 +563,17 @@
   httpcore
   test
 
+
+  org.apache.commons
+  commons-crypto
+  ${commons-crypto.version}
+  
+
+  net.java.dev.jna
+  jna
+
+  
+
   
   
 
@@ -680,31 +687,67 @@
 
   org.apache.hadoop
   hadoop-client
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-mapreduce-client-core
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-mapreduce-client-jobclient
   test-jar
   test
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-hdfs
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-hdfs
   test-jar
   test
+  
+
+  com.google.guava
+  guava
+
+  
 
 
   org.apache.hadoop
   hadoop-minicluster
   test
+  
+
+  com.google.guava
+  guava
+
+  
 
 
 
@@ -766,6 +809,12 @@
  

[7/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index 2acddda..55895f2 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.ipc;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
index d27602e..ab1a801 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 import java.io.IOException;
 import java.net.SocketAddress;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
index dc5f122..c4e9455 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
@@ -243,4 +243,4 @@ public final class CoprocessorRpcUtils {
 }
 return new DoNotRetryIOException(controller.errorText());
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index d2e0e90..6dab3b5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
index fe039d3..a8aca52 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 
 import io.netty.channel.Channel;
 import io.netty.channel.EventLoopGroup;

http://

[4/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index fda9f60..785f6d5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -55,11 +55,11 @@ import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.T
 import org.apache.hadoop.hbase.security.access.AccessControlLists;
 import org.apache.hadoop.util.StringUtils;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Joiner;
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
  * The base class for load balancers. It provides the the functions used to by

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
index a8e22ce..30cf16a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
@@ -47,9 +47,9 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.util.Pair;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
  * An implementation of the {@link 
org.apache.hadoop.hbase.master.LoadBalancer} that

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
index 907e745..d9a1ab8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
@@ -43,15 +43,15 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
+import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Futures;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.MoreExecutors;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
  * This will find where data for a region is located in HDFS. It 

[2/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 24a2f9c..3fb9459 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -2004,8 +2004,6 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
* @param tableName
* @param startKey
* @param stopKey
-   * @param callingMethod
-   * @param conf
* @param isReadOnly
* @param families
* @return A region on which you must call
@@ -3450,7 +3448,6 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
   /**
* Uses directly the assignment manager to assign the region.
* and waits until the specified region has completed assignment.
-   * @param tableName the table name
* @throws IOException
* @throw InterruptedException
* @return true if the region is assigned false otherwise.

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 8abef2b..cabbdb4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -100,8 +100,8 @@ import org.apache.htrace.Trace;
 import org.apache.htrace.TraceScope;
 import org.apache.htrace.impl.ProbabilitySampler;
 
-import com.google.common.base.Objects;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.UniformReservoir;
 
@@ -333,7 +333,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   || (!isReadCmd && desc != null && desc.getRegionReplication() != opts.replicas)) {
   needsDelete = true;
   // wait, why did it delete my table?!?
-  LOG.debug(Objects.toStringHelper("needsDelete")
+  LOG.debug(MoreObjects.toStringHelper("needsDelete")
 .add("needsDelete", needsDelete)
 .add("isReadCmd", isReadCmd)
 .add("exists", exists)

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
index 24e9590..e669f14 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
@@ -47,7 +47,7 @@ import 
org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
-import com.google.common.base.Stopwatch;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
 
 /**
  * A simple performance evaluation tool for single client and MR scans
@@ -102,8 +102,8 @@ public class ScanPerformanceEvaluation extends 
AbstractHBaseTool {
 FileSystem fs = filename.getFileSystem(getConf());
 
 // read the file from start to finish
-Stopwatch fileOpenTimer = new Stopwatch();
-Stopwatch streamTimer = new Stopwatch();
+Stopwatch fileOpenTimer = Stopwatch.createUnstarted();
+Stopwatch streamTimer = Stopwatch.createUnstarted();
 
 fileOpenTimer.start();
 FSDataInputStream in = fs.open(filename);
@@ -120,11 +120,12 @@ public class ScanPerformanceEvaluation extends 
AbstractHBaseTool {
 }
 streamTimer.stop();
 
-double throughput = (double)totalBytes / streamTimer.elapsedTime(TimeUnit.SECONDS);
+double throughput = (double)totalBytes / streamTimer.elapsed(TimeUnit.SECONDS);
 
 System.out.println("HDFS streaming: ");
-System.out.println("total time to open: " + fileOpenTimer.elapsedMillis() 
+ " ms");
-System.out.println("total time to read: " + streamTimer.elapsedMillis() + 
" ms");
+System.out.println("total time to open: " +
+  fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
+System.out.println("total time to read: " + 
streamTimer.elapsed(TimeUnit.MILLIS

[6/8] hbase git commit: HBASE-17908 Upgrade guava

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
--
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
index 3e4016d..16093b5 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
@@ -27,7 +27,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Implementation of the Cormode, Korn, Muthukrishnan, and Srivastava algorithm

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-it/pom.xml
--
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index ae8c790..eba4440 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -215,6 +215,12 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -226,8 +232,8 @@
       <artifactId>hbase-testing-util</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-shaded-miscellaneous</artifactId>
     </dependency>
     <dependency>
       <groupId>io.dropwizard.metrics</groupId>
@@ -295,12 +301,24 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
index ca8e277..e1c17a4 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;

http://git-wip-us.apache.org/repos/asf/hbase/blob/890d92a9/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
index 8596489..f041f72 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
@@ -46,14 +46,15 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.base.Objects;
-import com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
 /**
  * An integration test to detect regressions in HBASE-7912. Create
@@ -297,7 +298,8 @@ public class IntegrationTestBackupRestore extends IntegrationTestBase {
 rowsInBatch =
 Integer.parseInt(cmd.getOptionValue(NB_ROWS_IN_BATCH_KEY,
   Integer.toString(DEFAULT_NB_ROWS_IN_BATCH)));
-LOG.info(Objects.toStringHelper("Parsed Options").add(REGION_COUNT_KEY, regionsCountPerServer)
+LOG.info(MoreObjects.toStringHelper("Parsed Options").
+  add(REGION_COUNT_KEY, regionsCountPerServer)
 .add(REGIONSERVER_COUNT_KEY, regionServerCount).add(NB_ROWS_

hbase git commit: HBASE-18420 Some methods of Admin don't use ColumnFamilyDescriptor

Repository: hbase
Updated Branches:
  refs/heads/branch-2 b81fed7f8 -> f8fa04c71


HBASE-18420 Some methods of Admin don't use ColumnFamilyDescriptor


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f8fa04c7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f8fa04c7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f8fa04c7

Branch: refs/heads/branch-2
Commit: f8fa04c71763334a0a515e7d8bceb9c4d86d9a8d
Parents: b81fed7
Author: Chia-Ping Tsai 
Authored: Fri Jul 21 19:15:09 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Jul 21 19:15:09 2017 +0800

--
 .../main/java/org/apache/hadoop/hbase/client/Admin.java | 12 ++--
 .../java/org/apache/hadoop/hbase/client/HBaseAdmin.java | 12 ++--
 2 files changed, 12 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f8fa04c7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 9bb5df4..6e9b0df 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -714,7 +714,7 @@ public interface Admin extends Abortable, Closeable {
* @deprecated As of release 2.0.0.
* (<a href="https://issues.apache.org/jira/browse/HBASE-1989">HBASE-1989</a>).
* This will be removed in HBase 3.0.0.
-   * Use {@link #addColumnFamily(TableName, HColumnDescriptor)}.
+   * Use {@link #addColumnFamily(TableName, ColumnFamilyDescriptor)}.
*/
   @Deprecated
   void addColumn(final TableName tableName, final HColumnDescriptor columnFamily)
@@ -727,7 +727,7 @@ public interface Admin extends Abortable, Closeable {
* @param columnFamily column family descriptor of column family to be added
* @throws IOException if a remote or network exception occurs
*/
-  void addColumnFamily(final TableName tableName, final HColumnDescriptor columnFamily)
+  void addColumnFamily(final TableName tableName, final ColumnFamilyDescriptor columnFamily)
 throws IOException;
 
   /**
@@ -743,7 +743,7 @@ public interface Admin extends Abortable, Closeable {
* @return the result of the async add column family. You can use 
Future.get(long, TimeUnit) to
* wait on the operation to complete.
*/
-  Future<Void> addColumnFamilyAsync(final TableName tableName, final HColumnDescriptor columnFamily)
+  Future<Void> addColumnFamilyAsync(final TableName tableName, final ColumnFamilyDescriptor columnFamily)
   throws IOException;
 
   /**
@@ -794,7 +794,7 @@ public interface Admin extends Abortable, Closeable {
* @deprecated As of release 2.0.0.
* (<a href="https://issues.apache.org/jira/browse/HBASE-1989">HBASE-1989</a>).
* This will be removed in HBase 3.0.0.
-   * Use {@link #modifyColumnFamily(TableName, HColumnDescriptor)}.
+   * Use {@link #modifyColumnFamily(TableName, ColumnFamilyDescriptor)}.
*/
   @Deprecated
   void modifyColumn(final TableName tableName, final HColumnDescriptor columnFamily)
@@ -807,7 +807,7 @@ public interface Admin extends Abortable, Closeable {
* @param columnFamily new column family descriptor to use
* @throws IOException if a remote or network exception occurs
*/
-  void modifyColumnFamily(final TableName tableName, final HColumnDescriptor columnFamily)
+  void modifyColumnFamily(final TableName tableName, final ColumnFamilyDescriptor columnFamily)
   throws IOException;
 
   /**
@@ -823,7 +823,7 @@ public interface Admin extends Abortable, Closeable {
* @return the result of the async modify column family. You can use 
Future.get(long, TimeUnit) to
* wait on the operation to complete.
*/
-  Future<Void> modifyColumnFamilyAsync(TableName tableName, HColumnDescriptor columnFamily)
+  Future<Void> modifyColumnFamilyAsync(TableName tableName, ColumnFamilyDescriptor columnFamily)
   throws IOException;
 
 

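After this change, addColumnFamily/modifyColumnFamily and their async variants accept the immutable ColumnFamilyDescriptor built through ColumnFamilyDescriptorBuilder instead of an HColumnDescriptor. A minimal sketch of calling the reworked API with an HBase 2.0 client (the configuration, table name, and family name are assumptions for illustration, not part of this patch):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AddColumnFamilyExample {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // Build an immutable descriptor; HColumnDescriptor is no longer required here.
          ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder
              .newBuilder(Bytes.toBytes("cf1"))
              .setMaxVersions(3)
              .build();
          // The reworked Admin method takes the ColumnFamilyDescriptor directly.
          admin.addColumnFamily(TableName.valueOf("example_table"), family);
        }
      }
    }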
http://git-wip-us.apache.org/repos/asf/hbase/blob/f8fa04c7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index fb9df62..11224da 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -1000,7 +1000,7 @@ public class HBaseAdmin implements Admin {
   /**
* {@

hbase git commit: HBASE-18420 Some methods of Admin don't use ColumnFamilyDescriptor

Repository: hbase
Updated Branches:
  refs/heads/master af534acab -> 7941b83aa


HBASE-18420 Some methods of Admin don't use ColumnFamilyDescriptor


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7941b83a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7941b83a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7941b83a

Branch: refs/heads/master
Commit: 7941b83aafc721034b885f635dcb50cfb23ed9eb
Parents: af534ac
Author: Chia-Ping Tsai 
Authored: Fri Jul 21 19:14:33 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Jul 21 19:14:33 2017 +0800

--
 .../main/java/org/apache/hadoop/hbase/client/Admin.java | 12 ++--
 .../java/org/apache/hadoop/hbase/client/HBaseAdmin.java | 12 ++--
 2 files changed, 12 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7941b83a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 9bb5df4..6e9b0df 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -714,7 +714,7 @@ public interface Admin extends Abortable, Closeable {
* @deprecated As of release 2.0.0.
* (<a href="https://issues.apache.org/jira/browse/HBASE-1989">HBASE-1989</a>).
* This will be removed in HBase 3.0.0.
-   * Use {@link #addColumnFamily(TableName, HColumnDescriptor)}.
+   * Use {@link #addColumnFamily(TableName, ColumnFamilyDescriptor)}.
*/
   @Deprecated
   void addColumn(final TableName tableName, final HColumnDescriptor columnFamily)
@@ -727,7 +727,7 @@ public interface Admin extends Abortable, Closeable {
* @param columnFamily column family descriptor of column family to be added
* @throws IOException if a remote or network exception occurs
*/
-  void addColumnFamily(final TableName tableName, final HColumnDescriptor columnFamily)
+  void addColumnFamily(final TableName tableName, final ColumnFamilyDescriptor columnFamily)
 throws IOException;
 
   /**
@@ -743,7 +743,7 @@ public interface Admin extends Abortable, Closeable {
* @return the result of the async add column family. You can use 
Future.get(long, TimeUnit) to
* wait on the operation to complete.
*/
-  Future<Void> addColumnFamilyAsync(final TableName tableName, final HColumnDescriptor columnFamily)
+  Future<Void> addColumnFamilyAsync(final TableName tableName, final ColumnFamilyDescriptor columnFamily)
   throws IOException;
 
   /**
@@ -794,7 +794,7 @@ public interface Admin extends Abortable, Closeable {
* @deprecated As of release 2.0.0.
* (<a href="https://issues.apache.org/jira/browse/HBASE-1989">HBASE-1989</a>).
* This will be removed in HBase 3.0.0.
-   * Use {@link #modifyColumnFamily(TableName, HColumnDescriptor)}.
+   * Use {@link #modifyColumnFamily(TableName, ColumnFamilyDescriptor)}.
*/
   @Deprecated
   void modifyColumn(final TableName tableName, final HColumnDescriptor columnFamily)
@@ -807,7 +807,7 @@ public interface Admin extends Abortable, Closeable {
* @param columnFamily new column family descriptor to use
* @throws IOException if a remote or network exception occurs
*/
-  void modifyColumnFamily(final TableName tableName, final HColumnDescriptor columnFamily)
+  void modifyColumnFamily(final TableName tableName, final ColumnFamilyDescriptor columnFamily)
   throws IOException;
 
   /**
@@ -823,7 +823,7 @@ public interface Admin extends Abortable, Closeable {
* @return the result of the async modify column family. You can use 
Future.get(long, TimeUnit) to
* wait on the operation to complete.
*/
-  Future<Void> modifyColumnFamilyAsync(TableName tableName, HColumnDescriptor columnFamily)
+  Future<Void> modifyColumnFamilyAsync(TableName tableName, ColumnFamilyDescriptor columnFamily)
   throws IOException;
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/7941b83a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index fb9df62..11224da 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -1000,7 +1000,7 @@ public class HBaseAdmin implements Admin {
   /**
* {@inhe