hbase git commit: HBASE-18931 Make ObserverContext an interface.

2017-10-05 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/master bafbade24 -> e2ad4c038


HBASE-18931 Make ObserverContext an interface.

Change-Id: I9284a3271e06a3ee8ab9719cf012a4d8b3a82c88


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e2ad4c03
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e2ad4c03
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e2ad4c03

Branch: refs/heads/master
Commit: e2ad4c038cf5c4144195edad2af06980d599c45f
Parents: bafbade
Author: Apekshit Sharma 
Authored: Tue Oct 3 18:34:16 2017 -0700
Committer: Apekshit Sharma 
Committed: Thu Oct 5 23:34:19 2017 -0700

--
 .../hbase/coprocessor/CoprocessorHost.java  |   2 +-
 .../hbase/coprocessor/ObserverContext.java  |  78 ++--
 .../hbase/coprocessor/ObserverContextImpl.java  | 105 
 .../hbase/coprocessor/TestMasterObserver.java   |   5 -
 .../security/access/TestAccessController.java   | 126 +--
 .../security/access/TestAccessController3.java  |   4 +-
 .../security/access/TestNamespaceCommands.java  |  12 +-
 .../access/TestWithDisabledAuthorization.java   |  92 +++---
 8 files changed, 230 insertions(+), 194 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e2ad4c03/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index 07e5a3a..14a1d31 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -545,7 +545,7 @@ public abstract class CoprocessorHost extends Function> {}
 
-  private abstract class ObserverOperation extends ObserverContext {
+  private abstract class ObserverOperation extends ObserverContextImpl {
 ObserverGetter observerGetter;
 
 ObserverOperation(ObserverGetter observerGetter) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e2ad4c03/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java
index 3110abc..3cb054b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java
@@ -17,16 +17,14 @@
  */
 package org.apache.hadoop.hbase.coprocessor;
 
-import java.util.Optional;
-
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 
+import java.util.Optional;
+
 /**
  * Carries the execution state for a given invocation of an Observer 
coprocessor
  * ({@link RegionObserver}, {@link MasterObserver}, or {@link WALObserver})
@@ -39,64 +37,21 @@ import org.apache.yetus.audience.InterfaceStability;
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
-public class ObserverContext {
-  private E env;
-  private boolean bypass;
-  private boolean complete;
-  private final User caller;
-
-  @InterfaceAudience.Private
-  public ObserverContext(User caller) {
-this.caller = caller;
-  }
-
-  public E getEnvironment() {
-return env;
-  }
-
-  @InterfaceAudience.Private
-  public void prepare(E env) {
-this.env = env;
-  }
+public interface ObserverContext {
+  E getEnvironment();
 
   /**
* Call to indicate that the current coprocessor's return value should be
* used in place of the normal HBase obtained value.
*/
-  public void bypass() {
-bypass = true;
-  }
+  void bypass();
 
   /**
* Call to indicate that additional coprocessors further down the execution
* chain do not need to be invoked.  Implies that this coprocessor's response
* is definitive.
*/
-  public void complete() {
-complete = true;
-  }
-
-  /**
-   * For use by the coprocessor framework.
-   * @return true if {@link ObserverContext#bypass()}
-   * was called by one of the loaded coprocessors, false 
otherwise.
-   */
-  public boolean shouldBypass() {
-boolean current = bypass;
-bypass = false;
-return current;
-  }
-
-  /**
-

[44/60] [abbrv] hbase git commit: HBASE-18913 TestShell fails because NoMethodError: undefined method parseColumn

2017-10-05 Thread busbey
HBASE-18913 TestShell fails because NoMethodError: undefined method parseColumn


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/afa03a20
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/afa03a20
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/afa03a20

Branch: refs/heads/HBASE-18467
Commit: afa03a207ea5ff33439bc5a3bc67e7c59cc26298
Parents: 214d219
Author: Guanghao Zhang 
Authored: Tue Oct 3 09:16:35 2017 +0800
Committer: Guanghao Zhang 
Committed: Tue Oct 3 11:47:14 2017 +0800

--
 hbase-shell/src/main/ruby/hbase/table.rb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/afa03a20/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index 33c80ae..320ec7c 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -722,7 +722,7 @@ EOF
 
 # Returns family and (when has it) qualifier for a column name
 def parse_column_name(column)
-  split = 
org.apache.hadoop.hbase.KeyValue.parseColumn(column.to_java_bytes)
+  split = 
org.apache.hadoop.hbase.CellUtil.parseColumn(column.to_java_bytes)
   set_converter(split) if split.length > 1
   [split[0], split.length > 1 ? split[1] : nil]
 end
@@ -793,7 +793,7 @@ EOF
 # 2. register the CONVERTER information based on column spec - 
"cf:qualifier"
 def set_converter(column)
   family = String.from_java_bytes(column[0])
-  parts = org.apache.hadoop.hbase.KeyValue.parseColumn(column[1])
+  parts = org.apache.hadoop.hbase.CellUtil.parseColumn(column[1])
   if parts.length > 1
 @converters["#{family}:#{String.from_java_bytes(parts[0])}"] = 
String.from_java_bytes(parts[1])
 column[1] = parts[0]



[46/60] [abbrv] hbase git commit: HBASE-18815 We need to pass something like CompactionRequest in CP to give user some information about the compaction

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/0af61dce/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
index e0d9fa2..7add5d2 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
@@ -89,6 +89,7 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -730,7 +731,8 @@ public class TestMobCompactor {
 
 @Override
 public void 
preCompactSelection(ObserverContext c, Store 
store,
-List candidates, CompactionLifeCycleTracker 
tracker)
+List candidates, CompactionLifeCycleTracker 
tracker,
+CompactionRequest request)
 throws IOException {
   int count = candidates.size();
   if (count >= 2) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0af61dce/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
index 47e3783..2140cc1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.quotas.QuotaUtil;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -465,7 +466,8 @@ public class TestNamespaceAuditor {
 
 @Override
 public void postCompact(ObserverContext e, 
Store store,
-StoreFile resultFile, CompactionLifeCycleTracker tracker) throws 
IOException {
+StoreFile resultFile, CompactionLifeCycleTracker tracker, 
CompactionRequest request)
+throws IOException {
   postCompact.countDown();
 }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0af61dce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
index bde28a2..2b98cf2 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
 import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 
 /**
  * RegionObserver that just reimplements the default behavior,
@@ -69,7 +70,8 @@ public class NoOpScanPolicyObserver implements 
RegionCoprocessor, RegionObserver
   public InternalScanner preCompactScannerOpen(
   final ObserverContext c, Store store,
   List scanners, ScanType scanType, long 
earliestPutTs,
-  InternalScanner s, CompactionLifeCycleTracker tracker, long readPoint) 
throws IOException {
+  InternalScanner s, CompactionLifeCycleTracker tracker, CompactionRequest 
request,
+  long readPoint) throws IOException {
 HStore hs = (HStore) store;
 // this demonstrates how to override the scanners default behavior
 ScanInfo oldSI = hs.getScanInfo();

http://git-wip-us.apache.org/repos/asf/hbase/blob/0af61dce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java

[02/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionInfoBuilder.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionInfoBuilder.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionInfoBuilder.java
index 497ecc4..5e553dc 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionInfoBuilder.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionInfoBuilder.java
@@ -86,7 +86,7 @@ public class TestRegionInfoBuilder {
 RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
 byte [] bytes = RegionInfo.toByteArray(ri);
 RegionInfo pbri = RegionInfo.parseFrom(bytes);
-assertTrue(ri.equals(pbri));
+assertTrue(RegionInfo.COMPARATOR.compare(ri, pbri) == 0);
   }
 
   @Test
@@ -286,7 +286,7 @@ public class TestRegionInfoBuilder {
 .setReplicaId(replicaId).build();
 
 // convert two times, compare
-RegionInfo convertedRi = 
ProtobufUtil.toRegionInfo(ProtobufUtil.toProtoRegionInfo(ri));
+RegionInfo convertedRi = 
ProtobufUtil.toRegionInfo(ProtobufUtil.toRegionInfo(ri));
 
 assertEquals(ri, convertedRi);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index b594a2f..15c6b76 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
@@ -49,6 +48,7 @@ import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -62,11 +62,6 @@ import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
 import org.apache.hadoop.hbase.master.assignment.RegionStates;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -87,6 +82,11 @@ import org.junit.rules.TestName;
 import org.junit.rules.TestRule;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
 
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestRegionMergeTransactionOnCluster {
@@ -142,14 +142,14 @@ public class TestRegionMergeTransactionOnCluster {
 INITIAL_REGION_NUM - 1);
 
 // Merge 2nd and 3th region
-PairOfSameType mergedRegions =
+PairOfSameType mergedRegions =
   mergeRegionsAndVerifyRegionNum(MASTER, tableName, 1, 2,
 INITIAL_REGION_NUM - 2);
 
 verifyRowCount(table, RO

[33/60] [abbrv] hbase git commit: HBASE-18090 Improve TableSnapshotInputFormat to allow multiple mappers per region

2017-10-05 Thread busbey
HBASE-18090 Improve TableSnapshotInputFormat to allow multiple mappers per 
region

Signed-off-by: Ashu Pachauri 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4aadc5d3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4aadc5d3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4aadc5d3

Branch: refs/heads/HBASE-18467
Commit: 4aadc5d322884310ce6ef49fb0031bfbd2a096b9
Parents: 367dfab
Author: libisthanks 
Authored: Wed Sep 27 12:33:53 2017 +0800
Committer: Ashu Pachauri 
Committed: Sat Sep 30 02:08:42 2017 -0700

--
 ...IntegrationTestTableSnapshotInputFormat.java |   4 +-
 .../hadoop/hbase/mapred/TableMapReduceUtil.java |  38 ++
 .../hbase/mapred/TableSnapshotInputFormat.java  |  18 +++
 .../hbase/mapreduce/TableMapReduceUtil.java |  38 ++
 .../mapreduce/TableSnapshotInputFormat.java |  24 +++-
 .../mapreduce/TableSnapshotInputFormatImpl.java | 115 ---
 .../mapred/TestTableSnapshotInputFormat.java|  41 ---
 .../TableSnapshotInputFormatTestBase.java   |  23 ++--
 .../mapreduce/TestTableSnapshotInputFormat.java |  41 ---
 .../hbase/client/ClientSideRegionScanner.java   |   2 +
 .../hadoop/hbase/util/RegionSplitter.java   |  71 
 .../hadoop/hbase/util/TestRegionSplitter.java   |  20 
 12 files changed, 383 insertions(+), 52 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4aadc5d3/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
index 1a152e8..2df1c4b 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
@@ -151,7 +151,7 @@ public class IntegrationTestTableSnapshotInputFormat 
extends IntegrationTestBase
   int expectedNumSplits = numRegions > 2 ? numRegions - 2 : numRegions;
 
   
org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat.doTestWithMapReduce(util,
-tableName, snapshotName, START_ROW, END_ROW, tableDir, numRegions,
+tableName, snapshotName, START_ROW, END_ROW, tableDir, numRegions, 1,
 expectedNumSplits, false);
 } else if (mr.equalsIgnoreCase(MAPRED_IMPLEMENTATION)) {
   /*
@@ -165,7 +165,7 @@ public class IntegrationTestTableSnapshotInputFormat 
extends IntegrationTestBase
   int expectedNumSplits = numRegions;
 
   
org.apache.hadoop.hbase.mapred.TestTableSnapshotInputFormat.doTestWithMapReduce(util,
-tableName, snapshotName, MAPRED_START_ROW, MAPRED_END_ROW, tableDir, 
numRegions,
+tableName, snapshotName, MAPRED_START_ROW, MAPRED_END_ROW, tableDir, 
numRegions, 1,
 expectedNumSplits, false);
 } else {
   throw new IllegalArgumentException("Unrecognized mapreduce 
implementation: " + mr +".");

http://git-wip-us.apache.org/repos/asf/hbase/blob/4aadc5d3/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
index 35dbf02..0427f50 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
+import org.apache.hadoop.hbase.util.RegionSplitter;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
@@ -186,6 +187,43 @@ public class TableMapReduceUtil {
   }
 
   /**
+   * Sets up the job for reading from a table snapshot. It bypasses hbase 
servers
+   * and read directly from snapshot files.
+   *
+   * @param snapshotName The name of the snapshot (of a table) to read from.
+   * @param columns  The columns to scan.
+   * @param mapper  The mapper class to use.
+   * @param outputKeyClass  The class of the output key.
+   * @param outputValueClass  The class of the output value.
+   * @param jobConf  The current job to adjust.  Make sure the passed job is
+   * carryi

[29/60] [abbrv] hbase git commit: HBASE-18814 Improve TableSnapshotInputFormat to allow multiple mappers per region

2017-10-05 Thread busbey
HBASE-18814 Improve TableSnapshotInputFormat to allow multiple mappers per 
region

Signed-off-by: Ashu Pachauri 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f20580a5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f20580a5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f20580a5

Branch: refs/heads/HBASE-18467
Commit: f20580a53083b69eec3d766cf2a1f99d0bff9747
Parents: ca87d05
Author: libisthanks 
Authored: Wed Sep 27 12:33:53 2017 +0800
Committer: Ashu Pachauri 
Committed: Fri Sep 29 16:12:39 2017 -0700

--
 ...IntegrationTestTableSnapshotInputFormat.java |   4 +-
 .../hadoop/hbase/mapred/TableMapReduceUtil.java |  38 ++
 .../hbase/mapred/TableSnapshotInputFormat.java  |  18 +++
 .../hbase/mapreduce/TableMapReduceUtil.java |  38 ++
 .../mapreduce/TableSnapshotInputFormat.java |  24 +++-
 .../mapreduce/TableSnapshotInputFormatImpl.java | 115 ---
 .../mapred/TestTableSnapshotInputFormat.java|  41 ---
 .../TableSnapshotInputFormatTestBase.java   |  23 ++--
 .../mapreduce/TestTableSnapshotInputFormat.java |  41 ---
 .../hbase/client/ClientSideRegionScanner.java   |   2 +
 .../hadoop/hbase/util/RegionSplitter.java   |  71 
 .../hadoop/hbase/util/TestRegionSplitter.java   |  20 
 12 files changed, 383 insertions(+), 52 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f20580a5/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
index 1a152e8..2df1c4b 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
@@ -151,7 +151,7 @@ public class IntegrationTestTableSnapshotInputFormat 
extends IntegrationTestBase
   int expectedNumSplits = numRegions > 2 ? numRegions - 2 : numRegions;
 
   
org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat.doTestWithMapReduce(util,
-tableName, snapshotName, START_ROW, END_ROW, tableDir, numRegions,
+tableName, snapshotName, START_ROW, END_ROW, tableDir, numRegions, 1,
 expectedNumSplits, false);
 } else if (mr.equalsIgnoreCase(MAPRED_IMPLEMENTATION)) {
   /*
@@ -165,7 +165,7 @@ public class IntegrationTestTableSnapshotInputFormat 
extends IntegrationTestBase
   int expectedNumSplits = numRegions;
 
   
org.apache.hadoop.hbase.mapred.TestTableSnapshotInputFormat.doTestWithMapReduce(util,
-tableName, snapshotName, MAPRED_START_ROW, MAPRED_END_ROW, tableDir, 
numRegions,
+tableName, snapshotName, MAPRED_START_ROW, MAPRED_END_ROW, tableDir, 
numRegions, 1,
 expectedNumSplits, false);
 } else {
   throw new IllegalArgumentException("Unrecognized mapreduce 
implementation: " + mr +".");

http://git-wip-us.apache.org/repos/asf/hbase/blob/f20580a5/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
index 35dbf02..0427f50 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
+import org.apache.hadoop.hbase.util.RegionSplitter;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
@@ -186,6 +187,43 @@ public class TableMapReduceUtil {
   }
 
   /**
+   * Sets up the job for reading from a table snapshot. It bypasses hbase 
servers
+   * and read directly from snapshot files.
+   *
+   * @param snapshotName The name of the snapshot (of a table) to read from.
+   * @param columns  The columns to scan.
+   * @param mapper  The mapper class to use.
+   * @param outputKeyClass  The class of the output key.
+   * @param outputValueClass  The class of the output value.
+   * @param jobConf  The current job to adjust.  Make sure the passed job is
+   * carryi

[57/60] [abbrv] hbase git commit: HBASE-18878 Use Optional in return types.

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/98d1637b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 0fe5b2e..871f0b4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -381,7 +381,7 @@ public class TestAccessController extends SecureTestUtil {
   public Object run() throws Exception {
 HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf(name.getMethodName()));
 htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
-
ACCESS_CONTROLLER.preCreateTable(ObserverContext.createAndPrepare(CP_ENV, 
null), htd, null);
+
ACCESS_CONTROLLER.preCreateTable(ObserverContext.createAndPrepare(CP_ENV), htd, 
null);
 return null;
   }
 };
@@ -402,7 +402,7 @@ public class TestAccessController extends SecureTestUtil {
 HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
 htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
 htd.addFamily(new HColumnDescriptor("fam_" + 
User.getCurrent().getShortName()));
-
ACCESS_CONTROLLER.preModifyTable(ObserverContext.createAndPrepare(CP_ENV, null),
+
ACCESS_CONTROLLER.preModifyTable(ObserverContext.createAndPrepare(CP_ENV),
 TEST_TABLE, htd);
 return null;
   }
@@ -419,7 +419,7 @@ public class TestAccessController extends SecureTestUtil {
   @Override
   public Object run() throws Exception {
 ACCESS_CONTROLLER
-.preDeleteTable(ObserverContext.createAndPrepare(CP_ENV, null), 
TEST_TABLE);
+.preDeleteTable(ObserverContext.createAndPrepare(CP_ENV), 
TEST_TABLE);
 return null;
   }
 };
@@ -435,7 +435,7 @@ public class TestAccessController extends SecureTestUtil {
   @Override
   public Object run() throws Exception {
 ACCESS_CONTROLLER
-.preTruncateTable(ObserverContext.createAndPrepare(CP_ENV, null),
+.preTruncateTable(ObserverContext.createAndPrepare(CP_ENV),
   TEST_TABLE);
 return null;
   }
@@ -452,7 +452,7 @@ public class TestAccessController extends SecureTestUtil {
 AccessTestAction action = new AccessTestAction() {
   @Override
   public Object run() throws Exception {
-
ACCESS_CONTROLLER.preAddColumnFamily(ObserverContext.createAndPrepare(CP_ENV, 
null), TEST_TABLE,
+
ACCESS_CONTROLLER.preAddColumnFamily(ObserverContext.createAndPrepare(CP_ENV), 
TEST_TABLE,
   hcd);
 return null;
   }
@@ -470,7 +470,7 @@ public class TestAccessController extends SecureTestUtil {
 AccessTestAction action = new AccessTestAction() {
   @Override
   public Object run() throws Exception {
-
ACCESS_CONTROLLER.preModifyColumnFamily(ObserverContext.createAndPrepare(CP_ENV,
 null),
+
ACCESS_CONTROLLER.preModifyColumnFamily(ObserverContext.createAndPrepare(CP_ENV),
   TEST_TABLE, hcd);
 return null;
   }
@@ -486,7 +486,7 @@ public class TestAccessController extends SecureTestUtil {
 AccessTestAction action = new AccessTestAction() {
   @Override
   public Object run() throws Exception {
-
ACCESS_CONTROLLER.preDeleteColumnFamily(ObserverContext.createAndPrepare(CP_ENV,
 null),
+
ACCESS_CONTROLLER.preDeleteColumnFamily(ObserverContext.createAndPrepare(CP_ENV),
   TEST_TABLE, TEST_FAMILY);
 return null;
   }
@@ -502,7 +502,7 @@ public class TestAccessController extends SecureTestUtil {
 AccessTestAction disableTable = new AccessTestAction() {
   @Override
   public Object run() throws Exception {
-
ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV, 
null),
+
ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV),
   TEST_TABLE);
 return null;
   }
@@ -511,7 +511,7 @@ public class TestAccessController extends SecureTestUtil {
 AccessTestAction disableAclTable = new AccessTestAction() {
   @Override
   public Object run() throws Exception {
-
ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV, 
null),
+
ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV),
 AccessControlLists.ACL_TABLE_NAME);
 return null;
   }
@@ -532,7 +532,7 @@ public class TestAccessController extends SecureTestUtil {
   @Override
   public Object run() throws Exception {
 ACCESS_CONTROLLER
-.preEnableTable(ObserverContext.createAndPrepare(CP_ENV, null), 
TEST_TABL

[31/60] [abbrv] hbase git commit: HBASE-18884 (addendum) Add more javadoc comment. Coprocessor Design Improvements follow up of HBASE-17732.

2017-10-05 Thread busbey
HBASE-18884 (addendum) Add more javadoc comment. Coprocessor Design 
Improvements follow up of HBASE-17732.

Change-Id: Id1a9e508cc04612b1e79e6cfa7e39b7755598be3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cacf3f58
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cacf3f58
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cacf3f58

Branch: refs/heads/HBASE-18467
Commit: cacf3f58356df41fe15e8375f9b633d3db16f8c6
Parents: c835dcc
Author: Apekshit Sharma 
Authored: Fri Sep 29 17:24:10 2017 -0700
Committer: Apekshit Sharma 
Committed: Fri Sep 29 17:32:11 2017 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/Coprocessor.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cacf3f58/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
index 422e064..c0d9b60 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
@@ -96,7 +96,9 @@ public interface Coprocessor {
   default void stop(CoprocessorEnvironment env) throws IOException {}
 
   /**
-   * Coprocessor endpoints providing protobuf services should implement this 
interface.
+   * Coprocessor endpoints providing protobuf services should override this 
method.
+   * @return Iterable of {@link Service}s or empty collection. Implementations 
should never
+   * return null.
*/
   default Iterable getServices() {
 return Collections.EMPTY_SET;



[38/60] [abbrv] hbase git commit: HBASE-18897 Substitute MemStore for Memstore

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/d35d8376/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
index 354b056..2ada5a9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
@@ -81,8 +81,8 @@ public class StoreUtils {
* were created by a mapreduce bulk load are ignored, as they do not 
correspond to any specific
* put operation, and thus do not have a memstoreTS associated with them.
*/
-  public static OptionalLong getMaxMemstoreTSInList(Collection 
sfs) {
-return sfs.stream().filter(sf -> 
!sf.isBulkLoadResult()).mapToLong(HStoreFile::getMaxMemstoreTS)
+  public static OptionalLong getMaxMemStoreTSInList(Collection 
sfs) {
+return sfs.stream().filter(sf -> 
!sf.isBulkLoadResult()).mapToLong(HStoreFile::getMaxMemStoreTS)
 .max();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/d35d8376/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
index 169d1d8..eb2a9b6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
@@ -96,7 +96,7 @@ public class StripeStoreConfig {
 this.splitPartCount = splitPartCount;
 // Arbitrary default split size - 4 times the size of one L0 compaction.
 // If we flush into L0 there's no split compaction, but for default value 
it is ok.
-double flushSize = sci.getMemstoreFlushSize();
+double flushSize = sci.getMemStoreFlushSize();
 if (flushSize == 0) {
   flushSize = 128 * 1024 * 1024;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/d35d8376/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
index fe9ae30..b8194eb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
@@ -115,10 +115,10 @@ public class CompactionConfiguration {
 this.storeConfigInfo = storeConfigInfo;
 
 maxCompactSize = conf.getLong(HBASE_HSTORE_COMPACTION_MAX_SIZE_KEY, 
Long.MAX_VALUE);
-offPeakMaxCompactSize = 
conf.getLong(HBASE_HSTORE_COMPACTION_MAX_SIZE_OFFPEAK_KEY, 
-  maxCompactSize);  
+offPeakMaxCompactSize = 
conf.getLong(HBASE_HSTORE_COMPACTION_MAX_SIZE_OFFPEAK_KEY,
+  maxCompactSize);
 minCompactSize = conf.getLong(HBASE_HSTORE_COMPACTION_MIN_SIZE_KEY,
-storeConfigInfo.getMemstoreFlushSize());
+storeConfigInfo.getMemStoreFlushSize());
 minFilesToCompact = Math.max(2, 
conf.getInt(HBASE_HSTORE_COMPACTION_MIN_KEY,
   /*old name*/ conf.getInt("hbase.hstore.compactionThreshold", 3)));
 maxFilesToCompact = conf.getInt(HBASE_HSTORE_COMPACTION_MAX_KEY, 10);
@@ -126,7 +126,7 @@ public class CompactionConfiguration {
 offPeakCompactionRatio = 
conf.getFloat(HBASE_HSTORE_COMPACTION_RATIO_OFFPEAK_KEY, 5.0F);
 
 throttlePoint = 
conf.getLong("hbase.regionserver.thread.compaction.throttle",
-  2 * maxFilesToCompact * storeConfigInfo.getMemstoreFlushSize());
+  2 * maxFilesToCompact * storeConfigInfo.getMemStoreFlushSize());
 majorCompactionPeriod = conf.getLong(HConstants.MAJOR_COMPACTION_PERIOD, 
1000*60*60*24*7);
 // Make it 0.5 so jitter has us fall evenly either side of when the 
compaction should run
 majorCompactionJitter = 
conf.getFloat("hbase.hregion.majorcompaction.jitter", 0.50F);

http://git-wip-us.apache.org/repos/asf/hbase/blob/d35d8376/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index 2c9a519..5865ed5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop

[43/60] [abbrv] hbase git commit: HBASE-18814 Make ScanMetrics enabled and add counter into the MapReduce Job over snapshot

2017-10-05 Thread busbey
HBASE-18814 Make ScanMetrics enabled and add counter into the MapReduce Job over snapshot

Signed-off-by: Ashu Pachauri 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/214d2199
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/214d2199
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/214d2199

Branch: refs/heads/HBASE-18467
Commit: 214d21994e5e0c64ca9f463b51a91279deffcef5
Parents: 4093cc0
Author: libisthanks 
Authored: Thu Sep 14 09:58:34 2017 +0800
Committer: Ashu Pachauri 
Committed: Mon Oct 2 18:25:18 2017 -0700

--
 .../apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java | 1 +
 .../org/apache/hadoop/hbase/client/ClientSideRegionScanner.java | 1 +
 2 files changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/214d2199/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index 5098b30..bcaa448 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -216,6 +216,7 @@ public class TableSnapshotInputFormatImpl {
   scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
   // disable caching of data blocks
   scan.setCacheBlocks(false);
+  scan.setScanMetricsEnabled(true);
 
   scanner =
   new ClientSideRegionScanner(conf, fs, new Path(split.restoreDir), 
htd, hri, scan, null);

http://git-wip-us.apache.org/repos/asf/hbase/blob/214d2199/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
index 141fcdd..6871717 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
@@ -86,6 +86,7 @@ public class ClientSideRegionScanner extends 
AbstractClientScanner {
 resultSize += CellUtil.estimatedSerializedSizeOf(cell);
   }
   this.scanMetrics.countOfBytesInResults.addAndGet(resultSize);
+  this.scanMetrics.countOfRowsScanned.incrementAndGet();
 }
 
 return result;



[48/60] [abbrv] hbase git commit: HBASE-18929 Hbase backup command doesn’t show debug option to enable backup in debug mode (Amit Kabra)

2017-10-05 Thread busbey
HBASE-18929 Hbase backup command doesn’t show debug option to enable backup 
in debug mode (Amit Kabra)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1ff90d3b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1ff90d3b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1ff90d3b

Branch: refs/heads/HBASE-18467
Commit: 1ff90d3b4b82427d4ab984ee937ea4c02b1d5910
Parents: 0af61dc
Author: tedyu 
Authored: Tue Oct 3 10:11:27 2017 -0700
Committer: tedyu 
Committed: Tue Oct 3 10:11:27 2017 -0700

--
 .../java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1ff90d3b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
index 194d350..fb4f095 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
@@ -34,6 +34,8 @@ import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKE
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
+import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
 
 import java.io.IOException;
 import java.net.URI;
@@ -383,7 +385,7 @@ public final class BackupCommands {
   options.addOption(OPTION_SET, true, OPTION_SET_BACKUP_DESC);
   options.addOption(OPTION_TABLE, true, OPTION_TABLE_LIST_DESC);
   options.addOption(OPTION_YARN_QUEUE_NAME, true, 
OPTION_YARN_QUEUE_NAME_DESC);
-
+  options.addOption(OPTION_DEBUG, false, OPTION_DEBUG_DESC);
 
   HelpFormatter helpFormatter = new HelpFormatter();
   helpFormatter.setLeftPadding(2);



[45/60] [abbrv] hbase git commit: HBASE-18490 Modifying a table descriptor to enable replicas does not create replica regions (Ram)

2017-10-05 Thread busbey
HBASE-18490 Modifying a table descriptor to enable replicas does not
create replica regions (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/50265395
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/50265395
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/50265395

Branch: refs/heads/HBASE-18467
Commit: 50265395d1fd96415162224179dd0ad087e9b8a7
Parents: afa03a2
Author: Ramkrishna 
Authored: Tue Oct 3 13:10:44 2017 +0530
Committer: Ramkrishna 
Committed: Tue Oct 3 13:10:44 2017 +0530

--
 .../apache/hadoop/hbase/MetaTableAccessor.java  |   2 +-
 .../hadoop/hbase/client/RegionReplicaUtil.java  |  30 +++
 .../master/procedure/CreateTableProcedure.java  |  29 +--
 .../master/procedure/EnableTableProcedure.java  | 105 -
 .../master/procedure/ModifyTableProcedure.java  |  22 +-
 .../TestRegionReplicasWithModifyTable.java  | 215 +++
 6 files changed, 371 insertions(+), 32 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/50265395/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index 8b6cf66..4286afe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -274,7 +274,7 @@ public class MetaTableAccessor {
* @return An {@link Table} for hbase:meta
* @throws IOException
*/
-  static Table getMetaHTable(final Connection connection)
+  public static Table getMetaHTable(final Connection connection)
   throws IOException {
 // We used to pass whole CatalogTracker in here, now we just pass in 
Connection
 if (connection == null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/50265395/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
index d444c82..9c5af37 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.hbase.client;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;
+import java.util.List;
 
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -157,4 +159,32 @@ public class RegionReplicaUtil {
 }
 return 0;
   }
+
+  /**
+   * Create any replicas for the regions (the default replicas that was 
already created is passed to
+   * the method)
+   * @param tableDescriptor descriptor to use
+   * @param regions existing regions
+   * @param oldReplicaCount existing replica count
+   * @param newReplicaCount updated replica count due to modify table
+   * @return the combined list of default and non-default replicas
+   */
+  public static List addReplicas(final TableDescriptor 
tableDescriptor,
+  final List regions, int oldReplicaCount, int 
newReplicaCount) {
+if ((newReplicaCount - 1) <= 0) {
+  return regions;
+}
+List hRegionInfos = new ArrayList<>((newReplicaCount) * 
regions.size());
+for (int i = 0; i < regions.size(); i++) {
+  if (RegionReplicaUtil.isDefaultReplica(regions.get(i))) {
+// region level replica index starts from 0. So if oldReplicaCount was 
2 then the max replicaId for
+// the existing regions would be 1
+for (int j = oldReplicaCount; j < newReplicaCount; j++) {
+  
hRegionInfos.add(RegionReplicaUtil.getRegionInfoForReplica(regions.get(i), j));
+}
+  }
+}
+hRegionInfos.addAll(regions);
+return hRegionInfos;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/50265395/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
index a1c82c7..e9804dd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
@@ -340,7 +340,9 @@ public class CreateTableP

[03/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
index 3ad8ec8..6f109e5 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
@@ -23,10 +23,10 @@ import static org.junit.Assert.assertTrue;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.CategoryBasedTimeout;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
@@ -75,7 +75,7 @@ public class TestDeleteTableProcedure extends 
TestTableDDLProcedureBase {
 final TableName tableName = TableName.valueOf(name.getMethodName());
 final ProcedureExecutor procExec = 
getMasterProcedureExecutor();
 
-HRegionInfo[] regions = MasterProcedureTestingUtility.createTable(
+RegionInfo[] regions = MasterProcedureTestingUtility.createTable(
   procExec, tableName, null, "f");
 UTIL.getAdmin().disableTable(tableName);
 
@@ -118,7 +118,7 @@ public class TestDeleteTableProcedure extends 
TestTableDDLProcedureBase {
   }
 
   private void testSimpleDelete(final TableName tableName, byte[][] splitKeys) 
throws Exception {
-HRegionInfo[] regions = MasterProcedureTestingUtility.createTable(
+RegionInfo[] regions = MasterProcedureTestingUtility.createTable(
   getMasterProcedureExecutor(), tableName, splitKeys, "f1", "f2");
 UTIL.getAdmin().disableTable(tableName);
 
@@ -136,7 +136,7 @@ public class TestDeleteTableProcedure extends 
TestTableDDLProcedureBase {
 
 // create the table
 byte[][] splitKeys = null;
-HRegionInfo[] regions = MasterProcedureTestingUtility.createTable(
+RegionInfo[] regions = MasterProcedureTestingUtility.createTable(
   getMasterProcedureExecutor(), tableName, splitKeys, "f1", "f2");
 UTIL.getAdmin().disableTable(tableName);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
index 4e96cea..3eeb382 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
@@ -26,18 +26,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableState;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -50,6 +45,12 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestRule;
 
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableState;
+import 
org.apache.hadoop.hbase.shaded.pro

[36/60] [abbrv] hbase git commit: HBASE-17441 Fix invalid quoting around hadoop-3 build in yetus personality

2017-10-05 Thread busbey
HBASE-17441 Fix invalid quoting around hadoop-3 build in yetus personality


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/869b90c6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/869b90c6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/869b90c6

Branch: refs/heads/HBASE-18467
Commit: 869b90c612ebf2f931dcd43be59b817e4b55ab32
Parents: 9f1bfbe
Author: Josh Elser 
Authored: Thu Sep 28 17:49:00 2017 -0400
Committer: Josh Elser 
Committed: Sun Oct 1 15:46:24 2017 -0400

--
 dev-support/hbase-personality.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/869b90c6/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index b7a2f97..c58e47e 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -267,8 +267,8 @@ function hadoopcheck_rebuild
 echo_and_redirect "${logfile}" \
   "${MAVEN}" clean install \
 -DskipTests -DHBasePatchProcess \
--Dhadoop-three.version="${hadoopver} \
--Dhadoop.profile=3.0"
+-Dhadoop-three.version="${hadoopver}" \
+-Dhadoop.profile=3.0
 count=$(${GREP} -c '\[ERROR\]' "${logfile}")
 if [[ ${count} -gt 0 ]]; then
   add_vote_table -1 hadoopcheck "${BUILDMODEMSG} causes ${count} errors 
with Hadoop v${hadoopver}."



[23/60] [abbrv] hbase git commit: HBASE-13844 Move static helper methods from KeyValue into CellUtils

2017-10-05 Thread busbey
HBASE-13844 Move static helper methods from KeyValue into CellUtils

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/afce850c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/afce850c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/afce850c

Branch: refs/heads/HBASE-18467
Commit: afce850cfdc827cd727418b5f7a2543f2160f790
Parents: b5b4108
Author: Andy Yang 
Authored: Tue Sep 26 05:19:31 2017 -0700
Committer: Chia-Ping Tsai 
Committed: Fri Sep 29 11:50:23 2017 +0800

--
 .../hadoop/hbase/client/RegionInfoBuilder.java  |   9 -
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  48 
 .../java/org/apache/hadoop/hbase/KeyValue.java  | 257 +--
 .../java/org/apache/hadoop/hbase/TableName.java |  16 --
 .../hadoop/hbase/mapred/GroupingTableMap.java   |   3 +-
 .../hbase/mapreduce/GroupingTableMapper.java|   3 +-
 .../hbase/mapreduce/TableInputFormat.java   |   4 +-
 .../apache/hadoop/hbase/rest/RowResource.java   |  19 +-
 .../hadoop/hbase/rest/RowResultGenerator.java   |   4 +-
 .../hbase/rest/ScannerInstanceResource.java |   3 +-
 .../hbase/rest/ScannerResultGenerator.java  |   4 +-
 .../hadoop/hbase/rest/client/RemoteHTable.java  |   2 +-
 .../hadoop/hbase/rest/model/CellModel.java  |   3 +-
 .../hadoop/hbase/rest/TestScannerResource.java  |   4 +-
 .../hbase/rest/TestScannersWithFilters.java |   2 +-
 .../hbase/rest/TestScannersWithLabels.java  |   4 +-
 .../hadoop/hbase/rest/TestTableResource.java|   4 +-
 .../hadoop/hbase/io/hfile/FixedFileTrailer.java |   4 +-
 .../org/apache/hadoop/hbase/HBaseTestCase.java  |   2 +-
 .../hadoop/hbase/thrift/IncrementCoalescer.java |   4 +-
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  35 +--
 .../hadoop/hbase/thrift/ThriftUtilities.java|  10 +-
 22 files changed, 107 insertions(+), 337 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/afce850c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
index 7d5c476..acff186 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
@@ -487,14 +487,5 @@ public class RegionInfoBuilder {
   return RegionInfo.COMPARATOR.compare(this, other);
 }
 
-/**
- * @return Comparator to use comparing {@link KeyValue}s.
- * @deprecated Use Region#getCellComparator().  deprecated for hbase 2.0, 
remove for hbase 3.0
- */
-@Deprecated
-public KeyValue.KVComparator getComparator() {
-  return isMetaRegion()?
-  KeyValue.META_COMPARATOR: KeyValue.COMPARATOR;
-}
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/afce850c/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index a3029f8..dc5df30 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.hbase;
 
 import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIMITER;
+import static org.apache.hadoop.hbase.KeyValue.getDelimiter;
+import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIM_ARRAY;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -126,6 +129,51 @@ public final class CellUtil {
 return output;
   }
 
+  /**
+   * Makes a column in family:qualifier form from separate byte arrays.
+   * 
+   * Not recommended for usage as this is old-style API.
+   * @param family
+   * @param qualifier
+   * @return family:qualifier
+   */
+  public static byte [] makeColumn(byte [] family, byte [] qualifier) {
+return Bytes.add(family, COLUMN_FAMILY_DELIM_ARRAY, qualifier);
+  }
+
+  /**
+   * Splits a column in {@code family:qualifier} form into separate byte 
arrays. An empty qualifier
+   * (ie, {@code fam:}) is parsed as { fam, EMPTY_BYTE_ARRAY } 
while no delimiter (ie,
+   * {@code fam}) is parsed as an array of one element, { fam }.
+   * 
+   * Don't forget, HBase DOES support empty qualifiers. (see HBASE-9549)
+   * 
+   * 
+   * Not recommend to be used as t

[07/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
--
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index 6c401a9..c7a1aff 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -17,54 +17,57 @@
  * limitations under the License.
  */
 --%>
-<%@page 
import="org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType"%>
+<%@page import="java.net.URLEncoder"%>
 <%@ page contentType="text/html;charset=UTF-8"
   import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
-  import="org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString"
-  import="java.net.URLEncoder"
   import="java.util.ArrayList"
-  import="java.util.TreeMap"
-  import="java.util.List"
-  import="java.util.LinkedHashMap"
-  import="java.util.Map"
-  import="java.util.Set"
   import="java.util.Collection"
   import="java.util.Collections"
   import="java.util.Comparator"
+  import="java.util.LinkedHashMap"
+  import="java.util.List"
+  import="java.util.Map"
+  import="java.util.TreeMap"
   import="org.apache.commons.lang3.StringEscapeUtils"
   import="org.apache.hadoop.conf.Configuration"
-  import="org.apache.hadoop.util.StringUtils"
-  import="org.apache.hadoop.hbase.HRegionInfo"
+  import="org.apache.hadoop.hbase.HBaseConfiguration"
+  import="org.apache.hadoop.hbase.HColumnDescriptor"
+  import="org.apache.hadoop.hbase.HConstants"
   import="org.apache.hadoop.hbase.HRegionLocation"
-  import="org.apache.hadoop.hbase.ServerName"
-  import="org.apache.hadoop.hbase.ServerLoad"
   import="org.apache.hadoop.hbase.RegionLoad"
-  import="org.apache.hadoop.hbase.HConstants"
+  import="org.apache.hadoop.hbase.ServerLoad"
+  import="org.apache.hadoop.hbase.ServerName"
+  import="org.apache.hadoop.hbase.TableName"
+  import="org.apache.hadoop.hbase.TableNotFoundException"
+  import="org.apache.hadoop.hbase.client.Admin"
+  import="org.apache.hadoop.hbase.client.CompactionState"
+  import="org.apache.hadoop.hbase.client.RegionInfo"
+  import="org.apache.hadoop.hbase.client.RegionInfoBuilder"
+  import="org.apache.hadoop.hbase.client.RegionLocator"
+  import="org.apache.hadoop.hbase.client.RegionReplicaUtil"
+  import="org.apache.hadoop.hbase.client.Table"
   import="org.apache.hadoop.hbase.master.HMaster"
-  import="org.apache.hadoop.hbase.zookeeper.MetaTableLocator"
   import="org.apache.hadoop.hbase.quotas.QuotaTableUtil"
   import="org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot"
   import="org.apache.hadoop.hbase.util.Bytes"
   import="org.apache.hadoop.hbase.util.FSUtils"
-  
import="org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos"
-  import="org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos"
-  import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas"
-  
import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota"
-  import="org.apache.hadoop.hbase.TableName"
-  import="org.apache.hadoop.hbase.HColumnDescriptor"
-  import="org.apache.hadoop.hbase.HBaseConfiguration"
-  import="org.apache.hadoop.hbase.TableNotFoundException"%>
-<%@ page import="org.apache.hadoop.hbase.client.*" %>
+  import="org.apache.hadoop.hbase.zookeeper.MetaTableLocator"
+  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString"%>
+<%@ page 
import="org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos" 
%>
+<%@ page 
import="org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos" %>
+<%@ page 
import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas" %>
+<%@ page 
import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota"
 %>
 <%!
   /**
-   * @return An empty region load stamped with the passed in hri
+   * @return An empty region load stamped with the passed in 
regionInfo
* region name.
*/
-  private RegionLoad getEmptyRegionLoad(final HRegionInfo hri) {
+  private RegionLoad getEmptyRegionLoad(final RegionInfo regionInfo) {
 return new RegionLoad(ClusterStatusProtos.RegionLoad.newBuilder().
   setRegionSpecifier(HBaseProtos.RegionSpecifier.newBuilder().
   setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME).
-  setValue(ByteString.copyFrom(hri.getRegionName())).build()).build());
+  
setValue(ByteString.copyFrom(regionInfo.getRegionName())).build()).build());
   }
 %>
 <%
@@ -205,10 +208,10 @@ if ( fqtn != null ) {
 %> Split request accepted. <%
 } else if (action.equals("compact")) {
   if (key != null && key.length() > 0) {
-List regions = 
admin.getTableRegions(TableName.valueOf(fqtn));
+List regions = admin.getRegions(Ta

[18/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index fac3ef1..7b42d3d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -40,6 +40,7 @@ import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -69,8 +70,6 @@ import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
 import org.apache.hadoop.hbase.client.replication.TableCFs;
 import org.apache.hadoop.hbase.client.security.SecurityCapability;
@@ -86,6 +85,26 @@ import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
+import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
+import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
+import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
+import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
+import org.apache.hadoop.hbase.util.Addressing;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
+import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.zookeeper.KeeperException;
+
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
@@ -184,28 +203,10 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRe
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
-import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-import org.apache.hadoop.hbase.util.Addressing;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
-import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.zookeeper.KeeperException;
 
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcController;
-import java.util.stream.Collectors;
 
 /**
  * HBaseAdmin is no longer a client API. It is marked 
InterfaceAudience.Private indicating that
@@ -399,12 +400,26 @@ public class HBaseAdmin implements Admin {
 
   @Override
   public List getRegions(final ServerName sn) throws IOException {
-return getOnlineRegions(sn).stream().collect(Collectors.toList());
+AdminService.BlockingInterface admin = this.connection.getAdmin(sn);
+  

[41/60] [abbrv] hbase git commit: HBASE-18105 [AMv2] Split/Merge need cleanup; currently they diverge and do not fully embrace AMv2 world (Yi Liang)

2017-10-05 Thread busbey
HBASE-18105 [AMv2] Split/Merge need cleanup; currently they diverge and do not 
fully embrace AMv2 world (Yi Liang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/38eaf47f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/38eaf47f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/38eaf47f

Branch: refs/heads/HBASE-18467
Commit: 38eaf47fa7dd4b0b3b822e73c9e644370a5cacb6
Parents: d35d837
Author: Michael Stack 
Authored: Mon Oct 2 11:38:11 2017 -0700
Committer: Michael Stack 
Committed: Mon Oct 2 11:38:11 2017 -0700

--
 .../src/main/protobuf/MasterProcedure.proto |  4 +--
 .../src/main/protobuf/RegionServerStatus.proto  |  6 ++--
 .../hbase/coprocessor/MasterObserver.java   | 12 +++
 .../hbase/master/MasterCoprocessorHost.java | 12 +++
 .../master/assignment/AssignmentManager.java|  2 --
 .../assignment/SplitTableRegionProcedure.java   | 29 -
 .../hbase/coprocessor/TestMasterObserver.java   |  4 +--
 .../TestMergeTableRegionsProcedure.java | 29 +
 .../TestSplitTableRegionProcedure.java  | 33 
 .../MasterProcedureTestingUtility.java  |  2 +-
 .../TestSplitTransactionOnCluster.java  |  2 +-
 11 files changed, 99 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/38eaf47f/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index 2cdebb1..626530f 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -286,9 +286,9 @@ enum SplitTableRegionState {
   SPLIT_TABLE_REGION_PRE_OPERATION = 2;
   SPLIT_TABLE_REGION_CLOSE_PARENT_REGION = 3;
   SPLIT_TABLE_REGION_CREATE_DAUGHTER_REGIONS = 4;
-  SPLIT_TABLE_REGION_PRE_OPERATION_BEFORE_PONR = 5;
+  SPLIT_TABLE_REGION_PRE_OPERATION_BEFORE_META = 5;
   SPLIT_TABLE_REGION_UPDATE_META = 6;
-  SPLIT_TABLE_REGION_PRE_OPERATION_AFTER_PONR = 7;
+  SPLIT_TABLE_REGION_PRE_OPERATION_AFTER_META = 7;
   SPLIT_TABLE_REGION_OPEN_CHILD_REGIONS = 8;
   SPLIT_TABLE_REGION_POST_OPERATION = 9;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/38eaf47f/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto 
b/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto
index 1cd4376..f83bb20 100644
--- a/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto
@@ -105,11 +105,13 @@ message RegionStateTransition {
 READY_TO_SPLIT = 3;
 READY_TO_MERGE = 4;
 
-SPLIT_PONR = 5;
-MERGE_PONR = 6;
 
+/** We used to have PONR enums for split and merge in here occupying
+ positions 5 and 6 but they have since been removed. Do not reuse these
+ indices */
 SPLIT = 7;
 MERGED = 8;
+
 SPLIT_REVERTED = 9;
 MERGE_REVERTED = 10;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/38eaf47f/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index bfa88e6..85da610 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -696,25 +696,25 @@ public interface MasterObserver {
   final RegionInfo regionInfoB) throws IOException {}
 
   /**
-   * This will be called before PONR step as part of split transaction. Calling
+   * This will be called before update META step as part of split transaction. 
Calling
* {@link org.apache.hadoop.hbase.coprocessor.ObserverContext#bypass()} 
rollback the split
* @param ctx the environment to interact with the framework and master
* @param splitKey
* @param metaEntries
*/
-  default void preSplitRegionBeforePONRAction(
+  default void preSplitRegionBeforeMETAAction(
   final ObserverContext ctx,
   final byte[] splitKey,
   final List metaEntries) throws IOException {}
 
 
   /**
-   * This will be called after PONR step as part of split transaction
+   * This will be called after update META step as part of split transaction
* Calling {@link 
org.apache.hadoop.hbase.coprocessor.Ob

[56/60] [abbrv] hbase git commit: HBASE-18941 Confusing logging error around rerun of restore on an existing table.

2017-10-05 Thread busbey
HBASE-18941 Confusing logging error around rerun of restore on an existing 
table.

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1ec6ece9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1ec6ece9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1ec6ece9

Branch: refs/heads/HBASE-18467
Commit: 1ec6ece918a738cdb335bfb85f526f79c31e921e
Parents: 11aa674
Author: Amit 
Authored: Wed Oct 4 22:25:15 2017 +0530
Committer: tedyu 
Committed: Wed Oct 4 11:04:18 2017 -0700

--
 .../apache/hadoop/hbase/backup/impl/RestoreTablesClient.java   | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1ec6ece9/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java
index f2237af..1d32c88 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java
@@ -108,10 +108,10 @@ public class RestoreTablesClient {
   if (!isOverwrite) {
 LOG.error("Existing table (" + existTableList
 + ") found in the restore target, please add "
-+ "\"-overwrite\" option in the command if you mean"
++ "\"-o\" as overwrite option in the command if you mean"
 + " to restore to these existing tables");
-throw new IOException("Existing table found in target while no 
\"-overwrite\" "
-+ "option found");
+throw new IOException("Existing table found in target while no \"-o\" "
++ "as overwrite option found");
   } else {
 if (disabledTableList.size() > 0) {
   LOG.error("Found offline table in the restore target, "



[52/60] [abbrv] hbase git commit: HBASE-16894 Create more than 1 split per region, generalize HBASE-12590

2017-10-05 Thread busbey
HBASE-16894 Create more than 1 split per region, generalize HBASE-12590

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/16d483f9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/16d483f9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/16d483f9

Branch: refs/heads/HBASE-18467
Commit: 16d483f9003ddee71404f37ce7694003d1a18ac4
Parents: 56830c3
Author: Yi Liang 
Authored: Wed Sep 13 11:38:29 2017 -0700
Committer: Andrew Purtell 
Committed: Tue Oct 3 17:11:06 2017 -0700

--
 .../hbase/mapreduce/TableInputFormatBase.java   | 473 ++-
 .../mapreduce/TestTableInputFormatScan1.java| 103 +---
 .../mapreduce/TestTableInputFormatScanBase.java |  83 +++-
 3 files changed, 324 insertions(+), 335 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/16d483f9/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index b3be90b..e7a65e8 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -24,13 +24,12 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
@@ -41,6 +40,7 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -93,20 +93,21 @@ import org.apache.hadoop.util.StringUtils;
  * }
  *   }
  * 
+ *
+ *
+ * The number of InputSplits(mappers) match the number of regions in a table 
by default.
+ * Set "hbase.mapreduce.tableinput.mappers.per.region" to specify how many 
mappers per region, set
+ * this property will disable autobalance below.\
+ * Set "hbase.mapreduce.tif.input.autobalance" to enable autobalance, hbase 
will assign mappers
+ * based on average region size; For regions, whose size larger than average 
region size may assigned
+ * more mappers, and for smaller one, they may group together to use one 
mapper. If actual average
+ * region size is too big, like 50G, it is not good to only assign 1 mapper 
for those large regions.
+ * Use "hbase.mapreduce.tif.ave.regionsize" to set max average region size 
when enable "autobalanece",
+ * default mas average region size is 8G.
  */
 @InterfaceAudience.Public
 public abstract class TableInputFormatBase
-extends InputFormat {
-
-  /** Specify if we enable auto-balance for input in M/R jobs.*/
-  public static final String MAPREDUCE_INPUT_AUTOBALANCE = 
"hbase.mapreduce.input.autobalance";
-  /** Specify if ratio for data skew in M/R jobs, it goes well with the 
enabling hbase.mapreduce
-   * .input.autobalance property.*/
-  public static final String INPUT_AUTOBALANCE_MAXSKEWRATIO = 
"hbase.mapreduce.input.autobalance" +
-  ".maxskewratio";
-  /** Specify if the row key in table is text (ASCII between 32~126),
-   * default is true. False means the table is using binary row key*/
-  public static final String TABLE_ROW_TEXTKEY = "hbase.table.row.textkey";
+extends InputFormat {
 
   private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
 
@@ -114,8 +115,17 @@ extends InputFormat {
   "initialized. Ensure you call initializeTable either in your constructor 
or initialize " +
   "method";
   private static final String INITIALIZATION_ERROR = "Cannot create a record 
reader because of a" +
-" previous error. Please look at the previous logs lines from" +
-" the task's full log for more details.";
+  " previous error. Please look at the previous logs lines from" +
+  " the task's full log for more details.";
+
+  /** Specify if we enable auto-balance to set number of mappers in M/R jobs. 
*/
+  public static final String MAPREDUCE_INPUT_AUTOBALAN

[42/60] [abbrv] hbase git commit: HBASE-18894: null pointer exception in list_regions in shell command

2017-10-05 Thread busbey
HBASE-18894: null pointer exception in list_regions in shell command

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4093cc02
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4093cc02
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4093cc02

Branch: refs/heads/HBASE-18467
Commit: 4093cc0291aa7f00ec75a0bae48841ac85eb1fa5
Parents: 38eaf47
Author: Yi Liang 
Authored: Thu Sep 28 13:07:27 2017 -0700
Committer: Mike Drob 
Committed: Mon Oct 2 16:50:29 2017 -0500

--
 hbase-shell/src/main/ruby/shell/commands/list_regions.rb | 11 ++-
 1 file changed, 10 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4093cc02/hbase-shell/src/main/ruby/shell/commands/list_regions.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/list_regions.rb 
b/hbase-shell/src/main/ruby/shell/commands/list_regions.rb
index 5feb926..bcc0c4a 100644
--- a/hbase-shell/src/main/ruby/shell/commands/list_regions.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/list_regions.rb
@@ -77,6 +77,7 @@ EOF
   raise "#{cols} must be an array of strings. Possible values are 
SERVER_NAME, REGION_NAME, START_KEY, END_KEY, SIZE, REQ, LOCALITY."
 end
 
+error = false
 admin_instance = admin.instance_variable_get('@admin')
 conn_instance = admin_instance.getConnection
 cluster_status = admin_instance.getClusterStatus
@@ -105,6 +106,12 @@ EOF
 region_load_map = 
cluster_status.getLoad(server_name).getRegionsLoad
 region_load = region_load_map.get(hregion_info.getRegionName)
 
+if region_load.nil?
+  puts "Can not find region: #{hregion_info.getRegionName} , it 
may be disabled or in transition\n"
+  error = true
+  break
+end
+
 # Ignore regions which exceed our locality threshold
 next unless accept_region_for_locality? 
region_load.getDataLocality, locality_threshold
 result_hash = {}
@@ -157,12 +164,14 @@ EOF
 
 @end_time = Time.now
 
+return if error
+
 size_hash.each do |param, length|
   printf(" %#{length}s |", param)
 end
 printf("\n")
 
-size_hash.each do |_param, length|
+size_hash.each_value do |length|
   str = '-' * length
   printf(" %#{length}s |", str)
 end



[59/60] [abbrv] hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bafbade2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bafbade2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bafbade2

Branch: refs/heads/HBASE-18467
Commit: bafbade24899b9d221def6997528885032d3cae3
Parents: 98d1637
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 10:38:28 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bafbade2/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index 3b02b3c..91163bb 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -143,6 +143,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



[54/60] [abbrv] hbase git commit: HBASE-18932 Backup masking exception in a scenario and though it fails , it shows success message

2017-10-05 Thread busbey
HBASE-18932 Backup masking exception in a scenario and though it fails , it 
shows success message

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f251325
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f251325
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f251325

Branch: refs/heads/HBASE-18467
Commit: 2f2513255df0b3e46793c5e9284b9555f1bccd0a
Parents: 0a24178
Author: Amit 
Authored: Wed Oct 4 14:54:35 2017 +0530
Committer: tedyu 
Committed: Wed Oct 4 08:18:08 2017 -0700

--
 .../hadoop/hbase/backup/impl/IncrementalTableBackupClient.java  | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f251325/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
index 4efe04b..52f6b5c 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
@@ -221,7 +221,7 @@ public class IncrementalTableBackupClient extends 
TableBackupClient {
   // fail the overall backup and return
   failBackup(conn, backupInfo, backupManager, e, "Unexpected Exception : ",
 BackupType.INCREMENTAL, conf);
-  return;
+  throw new IOException(e);
 }
 
 // case INCREMENTAL_COPY:
@@ -237,7 +237,7 @@ public class IncrementalTableBackupClient extends 
TableBackupClient {
   String msg = "Unexpected exception in incremental-backup: incremental 
copy " + backupId;
   // fail the overall backup and return
   failBackup(conn, backupInfo, backupManager, e, msg, 
BackupType.INCREMENTAL, conf);
-  return;
+  throw new IOException(e);
 }
 // case INCR_BACKUP_COMPLETE:
 // set overall backup status: complete. Here we make sure to complete the 
backup.
@@ -266,6 +266,7 @@ public class IncrementalTableBackupClient extends 
TableBackupClient {
 } catch (IOException e) {
   failBackup(conn, backupInfo, backupManager, e, "Unexpected Exception : ",
 BackupType.INCREMENTAL, conf);
+  throw new IOException(e);
 }
   }
 



[21/60] [abbrv] hbase git commit: HBASE-18884 Coprocessor Design Improvements follow up of HBASE-17732

2017-10-05 Thread busbey
HBASE-18884 Coprocessor Design Improvements follow up of HBASE-17732

- Change Service Coprocessor#getService() to List 
Coprocessor#getServices()
- Checkin the finalized design doc into repo
- Added example to javadoc of Coprocessor base interface on how to implement 
one in the new design


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/74d0adce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/74d0adce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/74d0adce

Branch: refs/heads/HBASE-18467
Commit: 74d0adce61fc39ef9d1ec2254dcd0f53181bb62c
Parents: ca29598
Author: Apekshit Sharma 
Authored: Wed Sep 27 18:06:12 2017 -0700
Committer: Apekshit Sharma 
Committed: Thu Sep 28 10:25:47 2017 -0700

--
 ...ad_of_inheritance-HBASE-17732-2017_09_27.pdf | Bin 0 -> 161724 bytes
 .../org/apache/hadoop/hbase/Coprocessor.java|  36 ---
 .../coprocessor/AggregateImplementation.java|   6 ++--
 .../apache/hadoop/hbase/coprocessor/Export.java |   6 ++--
 .../security/access/SecureBulkLoadEndpoint.java |   6 ++--
 .../coprocessor/ColumnAggregationEndpoint.java  |   7 ++--
 .../ColumnAggregationEndpointNullResponse.java  |   6 ++--
 .../ColumnAggregationEndpointWithErrors.java|   6 ++--
 .../coprocessor/ProtobufCoprocessorService.java |   6 ++--
 .../TestAsyncCoprocessorEndpoint.java   |   6 ++--
 .../TestRegionServerCoprocessorEndpoint.java|   8 ++---
 .../regionserver/TestServerCustomProtocol.java  |   6 ++--
 .../coprocessor/example/BulkDeleteEndpoint.java |   6 ++--
 .../example/RefreshHFilesEndpoint.java  |   6 ++--
 .../coprocessor/example/RowCountEndpoint.java   |   6 ++--
 .../hbase/rsgroup/RSGroupAdminEndpoint.java |   5 +--
 .../coprocessor/BaseRowProcessorEndpoint.java   |   6 ++--
 .../CoprocessorServiceBackwardCompatiblity.java |  14 
 .../coprocessor/MultiRowMutationEndpoint.java   |   6 ++--
 .../hbase/coprocessor/WALCoprocessor.java   |   2 +-
 .../hbase/master/MasterCoprocessorHost.java |   6 +++-
 .../regionserver/RegionCoprocessorHost.java |   7 ++--
 .../RegionServerCoprocessorHost.java|   6 +++-
 .../hbase/security/access/AccessController.java |   8 ++---
 .../hbase/security/token/TokenProvider.java |   7 ++--
 .../visibility/VisibilityController.java|   6 ++--
 .../security/access/TestAccessController.java   |   6 ++--
 27 files changed, 117 insertions(+), 79 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/74d0adce/dev-support/design-docs/Coprocessor_Design_Improvements-Use_composition_instead_of_inheritance-HBASE-17732-2017_09_27.pdf
--
diff --git 
a/dev-support/design-docs/Coprocessor_Design_Improvements-Use_composition_instead_of_inheritance-HBASE-17732-2017_09_27.pdf
 
b/dev-support/design-docs/Coprocessor_Design_Improvements-Use_composition_instead_of_inheritance-HBASE-17732-2017_09_27.pdf
new file mode 100644
index 000..30a6d54
Binary files /dev/null and 
b/dev-support/design-docs/Coprocessor_Design_Improvements-Use_composition_instead_of_inheritance-HBASE-17732-2017_09_27.pdf
 differ

http://git-wip-us.apache.org/repos/asf/hbase/blob/74d0adce/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
index 38fe74e..c4003ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
@@ -20,14 +20,42 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
-import java.util.Optional;
+import java.util.Collections;
 
 import com.google.protobuf.Service;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 
 /**
- * Coprocessor interface.
+ * Base interface for the 4 coprocessors - MasterCoprocessor, 
RegionCoprocessor,
+ * RegionServerCoprocessor, and WALCoprocessor.
+ * Do NOT implement this interface directly. Unless an implementation 
implements one (or more) of
+ * the above mentioned 4 coprocessors, it'll fail to be loaded by any 
coprocessor host.
+ *
+ * Example:
+ * Building a coprocessor to observer Master operations.
+ * 
+ * class MyMasterCoprocessor implements MasterCoprocessor {
+ *   @Override
+ *   public Optional getMasterObserver() {
+ * return new MyMasterObserver();
+ *   }
+ * }
+ *
+ * class MyMasterObserver implements MasterObserver {
+ *   
+ * }
+ * 
+ *
+ * Building a Service which can be loaded by both Master and RegionServer
+ * 
+ * class MyCoprocessorService imp

[15/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
index 9c5c180..68e5e89 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
@@ -29,23 +29,26 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.favored.FavoredNodesPlan.Position;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.master.*;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.favored.FavoredNodesPlan.Position;
+import org.apache.hadoop.hbase.master.RackManager;
+import org.apache.hadoop.hbase.master.RegionPlan;
+import org.apache.hadoop.hbase.master.ServerManager;
+import org.apache.hadoop.hbase.master.SnapshotOfRegionAssignmentFromMeta;
 import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
@@ -85,7 +88,7 @@ public class FavoredNodeLoadBalancer extends BaseLoadBalancer 
implements Favored
   }
 
   @Override
-  public List balanceCluster(Map> 
clusterState)  {
+  public List balanceCluster(Map> 
clusterState)  {
 //TODO. Look at is whether Stochastic loadbalancer can be integrated with 
this
 List plans = new ArrayList<>();
 //perform a scan of the meta to get the latest updates (if any)
@@ -105,13 +108,13 @@ public class FavoredNodeLoadBalancer extends 
BaseLoadBalancer implements Favored
   // FindBugs complains about useless store! 
serverNameToServerNameWithoutCode.put(sn, s);
   serverNameWithoutCodeToServerName.put(s, sn);
 }
-for (Map.Entry> entry : 
clusterState.entrySet()) {
+for (Map.Entry> entry : 
clusterState.entrySet()) {
   ServerName currentServer = entry.getKey();
   //get a server without the startcode for the currentServer
   ServerName currentServerWithoutStartCode = 
ServerName.valueOf(currentServer.getHostname(),
   currentServer.getPort(), ServerName.NON_STARTCODE);
-  List list = entry.getValue();
-  for (HRegionInfo region : list) {
+  List list = entry.getValue();
+  for (RegionInfo region : list) {
 if(!FavoredNodesManager.isFavoredNodeApplicable(region)) {
   continue;
 }
@@ -157,9 +160,9 @@ public class FavoredNodeLoadBalancer extends 
BaseLoadBalancer implements Favored
   }
 
   @Override
-  public Map> 
roundRobinAssignment(List regions,
+  public Map> 
roundRobinAssignment(List regions,
   List servers) throws HBaseIOException {
-Map> assignmentMap;
+Map> assignmentMap;
 try {
   FavoredNodeAssignmentHelper assignmentHelper =
   new FavoredNodeAssignmentHelper(servers, rackManager);
@@ -183,10 +186,10 @@ public class FavoredNodeLoadBalancer extends 
BaseLoadBalancer implements Favored
   //need to come up with favored nodes assignments for them. The 
corner case
   //in (1) above is that all the nodes are unavailable and in that 
case, we
   //will note that this region doesn't have favored nodes.
-  Pair>, List> 
segregatedRegions =
+  Pair>, List> 
segregatedRegions =
   segregateRegionsAndAssignRegionsWithFavoredNodes(regions, servers);
-  Map> regionsWithFavoredNodesMap = 
segregatedRegions.getFirst();
-  List regionsWithNoFavoredNodes = 
segregatedRegions.getSecond();
+  Map> regionsWithFavoredNodesMap = 
segregatedRegions.getFirst();
+  List regionsWithNoFavoredNodes = 
segregatedRegions.getSecond();
   assignmentMap = new HashMap<>();
   roundRobinAssignmentImpl(assignmentHelper, assignmentMap, 
regionsWithNoFavoredNodes,
   servers);
@@ -201,7 +204,7 @@ public class FavoredNodeLoadBalancer extends 
BaseLoadBalancer implements Favored
   }
 
   @Override
-  public ServerName randomAssignment(HRegionI

[04/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
index b360145..c33cd56 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
@@ -17,7 +17,12 @@
  */
 package org.apache.hadoop.hbase.master.balancer;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -28,15 +33,17 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.stream.Collectors;
+
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseIOException;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.master.LoadBalancer;
 import org.apache.hadoop.hbase.master.MasterServices;
@@ -48,17 +55,14 @@ import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.MoveRegi
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.net.DNSToSwitchMapping;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 import org.mockito.Mockito;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
 @Category({MasterTests.class, MediumTests.class})
 public class TestBaseLoadBalancer extends BalancerTestBase {
@@ -108,13 +112,13 @@ public class TestBaseLoadBalancer extends 
BalancerTestBase {
 
   public static class MockBalancer extends BaseLoadBalancer {
 @Override
-public List balanceCluster(Map> 
clusterState) {
+public List balanceCluster(Map> 
clusterState) {
   return null;
 }
 
 @Override
 public List balanceCluster(TableName tableName,
-Map> clusterState) throws 
HBaseIOException {
+Map> clusterState) throws 
HBaseIOException {
   return null;
 }
   }
@@ -125,9 +129,9 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
* @param servers
* @param assignments
*/
-  private void assertImmediateAssignment(List regions, 
List servers,
-  Map assignments) {
-for (HRegionInfo region : regions) {
+  private void assertImmediateAssignment(List regions, 
List servers,
+  Map assignments) {
+for (RegionInfo region : regions) {
   assertTrue(assignments.containsKey(region));
 }
   }
@@ -143,31 +147,31 @@ public class TestBaseLoadBalancer extends 
BalancerTestBase {
   @Test (timeout=18)
   public void testBulkAssignment() throws Exception {
 List tmp = getListOfServerNames(randomServers(5, 0));
-List hris = randomRegions(20);
-hris.add(HRegionInfo.FIRST_META_REGIONINFO);
+List hris = randomRegions(20);
+hris.add(RegionInfoBuilder.FIRST_META_REGIONINFO);
 tmp.add(master);
-Map> plans = 
loadBalancer.roundRobinAssignment(hris, tmp);
+Map> plans = 
loadBalancer.roundRobinAssignment(hris, tmp);
 if (LoadBalancer.isTablesOnMaster(loadBalancer.getConf())) {
-  
assertTrue(plans.get(master).contains(HRegionInfo.FIRST_META_REGIONINFO));
+  
assertTrue(plans.get(master).contains(RegionInfoBuilder.FIRST_META_REGIONINFO));
   assertEquals(1, plans.get(master).size());
 }
 int totalRegion = 0;
-for (List regions: plans.values()) {
+for (List regions: plans.values()) {
   totalRegion += regions.size();
 }
 assertEquals(hris.size(), totalRegion);
 for (int[] mock : regionsAndServersMocks) {
   LOG.debug("testBulkAssignment with " + mock[0] + " regions and " + 
mock[1] + " servers");
- 

[58/60] [abbrv] hbase git commit: HBASE-18878 Use Optional in return types.

2017-10-05 Thread busbey
HBASE-18878 Use Optional in return types.

These functions have been changed to return Optional instead of T, where T = 
old return type.
- ObserverContext#getCaller
- RpcCallContext#getRequestUser
- RpcCallContext#getRequestUserName
- RpcServer#getCurrentCall
- RpcServer#getRequestUser
- RpcServer#getRequestUserName
- RpcServer#getRemoteAddress
- ServerCall#getRequestUser

Change-Id: Ib7b4e6be637283755f55755dd4c5124729f7052e
Signed-off-by: Apekshit Sharma 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/98d1637b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/98d1637b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/98d1637b

Branch: refs/heads/HBASE-18467
Commit: 98d1637bcd1954f73817d77b6aba7ebb6fc24890
Parents: 1ec6ece
Author: zhangduo 
Authored: Mon Oct 2 19:03:19 2017 -0700
Committer: Apekshit Sharma 
Committed: Wed Oct 4 15:55:18 2017 -0700

--
 .../org/apache/hadoop/hbase/util/Classes.java   |  12 +-
 .../apache/hadoop/hbase/coprocessor/Export.java |   5 +-
 .../coprocessor/ProtobufCoprocessorService.java |   4 +-
 .../hadoop/hbase/client/VersionInfoUtil.java|   3 +-
 .../hbase/coprocessor/CoprocessorHost.java  |   2 +-
 .../hbase/coprocessor/ObserverContext.java  |  74 ---
 .../org/apache/hadoop/hbase/ipc/CallRunner.java |   5 +-
 .../apache/hadoop/hbase/ipc/RpcCallContext.java |  13 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.java  |  51 
 .../org/apache/hadoop/hbase/ipc/ServerCall.java |  23 ++--
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java|   3 +-
 .../master/procedure/MasterProcedureEnv.java|   6 +-
 .../hbase/master/snapshot/SnapshotManager.java  |  24 ++--
 .../quotas/RegionServerRpcQuotaManager.java |   8 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  23 ++--
 .../hbase/regionserver/HRegionServer.java   |   2 +-
 .../hbase/regionserver/RSRpcServices.java   |  24 ++--
 .../regionserver/SecureBulkLoadManager.java |  58 -
 .../hbase/security/access/AccessController.java |  59 +
 .../hbase/security/token/TokenProvider.java |  33 ++---
 .../visibility/VisibilityController.java|  27 ++--
 .../security/visibility/VisibilityUtils.java|  11 +-
 .../hbase/ipc/TestProtobufRpcServiceImpl.java   |   4 +-
 .../hbase/ipc/TestSimpleRpcScheduler.java   |  11 +-
 .../security/access/TestAccessController.java   | 124 +--
 .../security/access/TestAccessController3.java  |   3 +-
 .../security/access/TestNamespaceCommands.java  |  10 +-
 .../access/TestWithDisabledAuthorization.java   |  93 +++---
 28 files changed, 335 insertions(+), 380 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/98d1637b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java
index 2366daf..c52a09c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java
@@ -17,7 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.util;
 
 import org.apache.yetus.audience.InterfaceAudience;
@@ -31,7 +30,7 @@ public class Classes {
   /**
* Equivalent of {@link Class#forName(String)} which also returns classes for
* primitives like boolean, etc.
-   * 
+   *
* @param className
*  The name of the class to retrieve. Can be either a normal class 
or
*  a primitive class.
@@ -64,10 +63,10 @@ public class Classes {
 return valueType;
   }
 
-  public static String stringify(Class[] classes) {
+  public static String stringify(Class[] classes) {
 StringBuilder buf = new StringBuilder();
 if (classes != null) {
-  for (Class c : classes) {
+  for (Class c : classes) {
 if (buf.length() > 0) {
   buf.append(",");
 }
@@ -78,4 +77,9 @@ public class Classes {
 }
 return buf.toString();
   }
+
+  @SuppressWarnings("unchecked")
+  public static  Class cast(Class clazz) {
+return (Class) clazz;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/98d1637b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
--
diff --git 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index 667f7a3..a00af0f 100644
--- 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java

[30/60] [abbrv] hbase git commit: HBASE-18559 Add histogram to MetricsConnection to track concurrent calls per server

2017-10-05 Thread busbey
HBASE-18559 Add histogram to MetricsConnection to track concurrent calls per 
server

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c835dcc7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c835dcc7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c835dcc7

Branch: refs/heads/HBASE-18467
Commit: c835dcc7e70cc13415e60198ed4fb9c0e7339ac4
Parents: f20580a
Author: Robert Yokota 
Authored: Thu Aug 10 14:13:04 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Sep 29 17:12:26 2017 -0700

--
 .../hadoop/hbase/client/MetricsConnection.java  | 16 
 .../apache/hadoop/hbase/ipc/AbstractRpcClient.java  |  1 +
 2 files changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c835dcc7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
index c54729b..b88baa4 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
@@ -73,6 +73,7 @@ public class MetricsConnection implements StatisticTrackable {
 private long responseSizeBytes = 0;
 private long startTime = 0;
 private long callTimeMs = 0;
+private int concurrentCallsPerServer = 0;
 
 public long getRequestSizeBytes() {
   return requestSizeBytes;
@@ -105,6 +106,14 @@ public class MetricsConnection implements 
StatisticTrackable {
 public void setCallTimeMs(long callTimeMs) {
   this.callTimeMs = callTimeMs;
 }
+
+public int getConcurrentCallsPerServer() {
+  return concurrentCallsPerServer;
+}
+
+public void setConcurrentCallsPerServer(int callsPerServer) {
+  this.concurrentCallsPerServer = callsPerServer;
+}
   }
 
   @VisibleForTesting
@@ -271,6 +280,7 @@ public class MetricsConnection implements 
StatisticTrackable {
   @VisibleForTesting protected final Counter metaCacheNumClearRegion;
   @VisibleForTesting protected final Counter hedgedReadOps;
   @VisibleForTesting protected final Counter hedgedReadWin;
+  @VisibleForTesting protected final Histogram concurrentCallsPerServerHist;
 
   // dynamic metrics
 
@@ -327,6 +337,8 @@ public class MetricsConnection implements 
StatisticTrackable {
 this.putTracker = new CallTracker(this.registry, "Mutate", "Put", scope);
 this.multiTracker = new CallTracker(this.registry, "Multi", scope);
 this.runnerStats = new RunnerStats(this.registry);
+this.concurrentCallsPerServerHist = 
registry.histogram(name(MetricsConnection.class, 
+  "concurrentCallsPerServer", scope));
 
 this.reporter = JmxReporter.forRegistry(this.registry).build();
 this.reporter.start();
@@ -422,6 +434,10 @@ public class MetricsConnection implements 
StatisticTrackable {
 
   /** Report RPC context to metrics system. */
   public void updateRpc(MethodDescriptor method, Message param, CallStats 
stats) {
+int callsPerServer = stats.getConcurrentCallsPerServer();
+if (callsPerServer > 0) {
+  concurrentCallsPerServerHist.update(callsPerServer);
+}
 // this implementation is tied directly to protobuf implementation 
details. would be better
 // if we could dispatch based on something static, ie, request Message 
type.
 if (method.getService() == ClientService.getDescriptor()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c835dcc7/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index de4dea4..22da05a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -416,6 +416,7 @@ public abstract class AbstractRpcClient implements RpcC
   if (count > maxConcurrentCallsPerServer) {
 throw new ServerTooBusyException(addr, count);
   }
+  cs.setConcurrentCallsPerServer(count);
   T connection = getConnection(remoteId);
   connection.sendRequest(call, hrc);
 } catch (Exception e) {



[47/60] [abbrv] hbase git commit: HBASE-18815 We need to pass something like CompactionRequest in CP to give user some information about the compaction

2017-10-05 Thread busbey
HBASE-18815 We need to pass something like CompactionRequest in CP to give user 
some information about the compaction

CompactionRequest was removed from CP in HBASE-18453, this change reintroduces
CompatcionRequest to CP as a read-only interface called CompactionRequest.
The CompactionRequest class is renamed to CompactionRequestImpl.

Additionally, this change removes selectionTimeInNanos from CompactionRequest 
and
uses selectionTime as a replacement. This means that CompactionRequest:toString
is modified and compare as well.

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0af61dce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0af61dce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0af61dce

Branch: refs/heads/HBASE-18467
Commit: 0af61dce6576edfb95b41c1959d1947add7dd092
Parents: 5026539
Author: Peter Somogyi 
Authored: Fri Sep 29 15:25:25 2017 -0700
Committer: Michael Stack 
Committed: Tue Oct 3 07:52:33 2017 -0700

--
 .../example/ZooKeeperScanPolicyObserver.java|   4 +-
 .../hbase/coprocessor/RegionObserver.java   |  27 +++-
 .../hbase/mob/DefaultMobStoreCompactor.java |   9 +-
 .../hadoop/hbase/regionserver/CompactSplit.java |   6 +-
 .../regionserver/DateTieredStoreEngine.java |   4 +-
 .../hadoop/hbase/regionserver/HStore.java   |  28 ++--
 .../regionserver/RegionCoprocessorHost.java |  37 +++--
 .../hbase/regionserver/StripeStoreEngine.java   |   6 +-
 .../compactions/CompactionContext.java  |   6 +-
 .../compactions/CompactionPolicy.java   |   2 -
 .../compactions/CompactionRequest.java  | 149 -
 .../compactions/CompactionRequestImpl.java  | 159 +++
 .../regionserver/compactions/Compactor.java |  18 +--
 .../compactions/DateTieredCompactionPolicy.java |  10 +-
 .../DateTieredCompactionRequest.java|   2 +-
 .../compactions/DateTieredCompactor.java|   6 +-
 .../compactions/DefaultCompactor.java   |  14 +-
 .../compactions/FIFOCompactionPolicy.java   |   8 +-
 .../compactions/RatioBasedCompactionPolicy.java |   4 +-
 .../compactions/SortedCompactionPolicy.java |   6 +-
 .../compactions/StripeCompactionPolicy.java |  18 +--
 .../compactions/StripeCompactor.java|   8 +-
 .../hbase/security/access/AccessController.java |   5 +-
 ...estAvoidCellReferencesIntoShippedBlocks.java |   4 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |  17 +-
 .../coprocessor/TestCoprocessorInterface.java   |   7 +-
 .../TestRegionObserverInterface.java|   6 +-
 .../TestRegionObserverScannerOpenHook.java  |   4 +-
 .../hbase/mob/compactions/TestMobCompactor.java |   4 +-
 .../hbase/namespace/TestNamespaceAuditor.java   |   4 +-
 .../regionserver/NoOpScanPolicyObserver.java|   4 +-
 .../hbase/regionserver/TestCompaction.java  |   6 +-
 .../regionserver/TestCompactionPolicy.java  |   4 +-
 .../TestDefaultCompactSelection.java|   4 +-
 .../regionserver/TestHRegionServerBulkLoad.java |   4 +-
 .../hbase/regionserver/TestMajorCompaction.java |   6 +-
 .../regionserver/TestStripeStoreEngine.java |   6 +-
 .../compactions/PerfTestCompactionPolicies.java |   2 +-
 .../regionserver/compactions/TestCompactor.java |   4 +-
 .../compactions/TestDateTieredCompactor.java|   4 +-
 .../compactions/TestStripeCompactionPolicy.java |   4 +-
 .../security/access/TestAccessController.java   |   2 +-
 .../hbase/util/TestCoprocessorScanPolicy.java   |   4 +-
 43 files changed, 378 insertions(+), 258 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0af61dce/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
--
diff --git 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
index 80290dd..7f2a906 100644
--- 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
+++ 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.zookeeper.KeeperEx

[37/60] [abbrv] hbase git commit: HBASE-18897 Substitute MemStore for Memstore

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/d35d8376/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
index a0d953e..e4f7663 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.hadoop.hbase.wal.WAL;
-import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -144,7 +143,7 @@ public class TestPerColumnFamilyFlush {
   }
 }
 
-long totalMemstoreSize = region.getMemstoreSize();
+long totalMemstoreSize = region.getMemStoreSize();
 
 // Find the smallest LSNs for edits wrt to each CF.
 long smallestSeqCF1 = region.getOldestSeqIdOfStore(FAMILY1);
@@ -152,13 +151,13 @@ public class TestPerColumnFamilyFlush {
 long smallestSeqCF3 = region.getOldestSeqIdOfStore(FAMILY3);
 
 // Find the sizes of the memstores of each CF.
-MemstoreSize cf1MemstoreSize = region.getStore(FAMILY1).getMemStoreSize();
-MemstoreSize cf2MemstoreSize = region.getStore(FAMILY2).getMemStoreSize();
-MemstoreSize cf3MemstoreSize = region.getStore(FAMILY3).getMemStoreSize();
+MemStoreSize cf1MemstoreSize = region.getStore(FAMILY1).getMemStoreSize();
+MemStoreSize cf2MemstoreSize = region.getStore(FAMILY2).getMemStoreSize();
+MemStoreSize cf3MemstoreSize = region.getStore(FAMILY3).getMemStoreSize();
 
 // Get the overall smallest LSN in the region's memstores.
 long smallestSeqInRegionCurrentMemstore = getWAL(region)
-
.getEarliestMemstoreSeqNum(region.getRegionInfo().getEncodedNameAsBytes());
+
.getEarliestMemStoreSeqNum(region.getRegionInfo().getEncodedNameAsBytes());
 
 // The overall smallest LSN in the region's memstores should be the same as
 // the LSN of the smallest edit in CF1
@@ -180,16 +179,16 @@ public class TestPerColumnFamilyFlush {
 region.flush(false);
 
 // Will use these to check if anything changed.
-MemstoreSize oldCF2MemstoreSize = cf2MemstoreSize;
-MemstoreSize oldCF3MemstoreSize = cf3MemstoreSize;
+MemStoreSize oldCF2MemstoreSize = cf2MemstoreSize;
+MemStoreSize oldCF3MemstoreSize = cf3MemstoreSize;
 
 // Recalculate everything
 cf1MemstoreSize = region.getStore(FAMILY1).getMemStoreSize();
 cf2MemstoreSize = region.getStore(FAMILY2).getMemStoreSize();
 cf3MemstoreSize = region.getStore(FAMILY3).getMemStoreSize();
-totalMemstoreSize = region.getMemstoreSize();
+totalMemstoreSize = region.getMemStoreSize();
 smallestSeqInRegionCurrentMemstore = getWAL(region)
-
.getEarliestMemstoreSeqNum(region.getRegionInfo().getEncodedNameAsBytes());
+
.getEarliestMemStoreSeqNum(region.getRegionInfo().getEncodedNameAsBytes());
 
 // We should have cleared out only CF1, since we chose the flush thresholds
 // and number of puts accordingly.
@@ -225,9 +224,9 @@ public class TestPerColumnFamilyFlush {
 cf1MemstoreSize = region.getStore(FAMILY1).getMemStoreSize();
 cf2MemstoreSize = region.getStore(FAMILY2).getMemStoreSize();
 cf3MemstoreSize = region.getStore(FAMILY3).getMemStoreSize();
-totalMemstoreSize = region.getMemstoreSize();
+totalMemstoreSize = region.getMemStoreSize();
 smallestSeqInRegionCurrentMemstore = getWAL(region)
-
.getEarliestMemstoreSeqNum(region.getRegionInfo().getEncodedNameAsBytes());
+
.getEarliestMemStoreSeqNum(region.getRegionInfo().getEncodedNameAsBytes());
 
 // CF1 and CF2, both should be absent.
 assertEquals(0, cf1MemstoreSize.getDataSize());
@@ -261,7 +260,7 @@ public class TestPerColumnFamilyFlush {
 
 // Since we won't find any CF above the threshold, and hence no specific
 // store to flush, we should flush all the memstores.
-assertEquals(0, region.getMemstoreSize());
+assertEquals(0, region.getMemStoreSize());
 HBaseTestingUtility.closeRegionAndWAL(region);
   }
 
@@ -285,12 +284,12 @@ public class TestPerColumnFamilyFlush {
   }
 }
 
-long totalMemstoreSize = region.getMemstoreSize();
+long totalMemstoreSize = region.getMemStoreSize();
 
 // Find the sizes of the memstores of each CF.
-MemstoreSize cf1MemstoreSize = region.getStore(FAMILY1).getMemStoreSize();
-MemstoreSize cf2MemstoreSize = region.getStore(FAMILY2).getMemStoreSize();
-MemstoreSize cf3MemstoreSize = region.getStore(FAMILY3).getMemStoreSize();
+M

[16/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index 169e42f..3f1373f 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -18,13 +18,6 @@
 
 package org.apache.hadoop.hbase.rsgroup;
 
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.LinkedListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -43,10 +36,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.constraint.ConstraintException;
 import org.apache.hadoop.hbase.master.LoadBalancer;
 import org.apache.hadoop.hbase.master.MasterServices;
@@ -54,6 +46,14 @@ import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer;
 import org.apache.hadoop.hbase.net.Address;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.LinkedListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 
 /**
  * GroupBasedLoadBalancer, used when Region Server Grouping is configured 
(HBase-6721)
@@ -106,31 +106,31 @@ public class RSGroupBasedLoadBalancer implements 
RSGroupableBalancer {
   }
 
   @Override
-  public List balanceCluster(TableName tableName, Map>
+  public List balanceCluster(TableName tableName, Map>
   clusterState) throws HBaseIOException {
 return balanceCluster(clusterState);
   }
 
   @Override
-  public List balanceCluster(Map> 
clusterState)
+  public List balanceCluster(Map> 
clusterState)
   throws HBaseIOException {
 if (!isOnline()) {
   throw new ConstraintException(RSGroupInfoManager.RSGROUP_TABLE_NAME +
   " is not online, unable to perform balance");
 }
 
-Map> correctedState = 
correctAssignments(clusterState);
+Map> correctedState = 
correctAssignments(clusterState);
 List regionPlans = new ArrayList<>();
 
-List misplacedRegions = 
correctedState.get(LoadBalancer.BOGUS_SERVER_NAME);
-for (HRegionInfo regionInfo : misplacedRegions) {
+List misplacedRegions = 
correctedState.get(LoadBalancer.BOGUS_SERVER_NAME);
+for (RegionInfo regionInfo : misplacedRegions) {
   regionPlans.add(new RegionPlan(regionInfo, null, null));
 }
 try {
   List rsgi = rsGroupInfoManager.listRSGroups();
   for (RSGroupInfo info: rsgi) {
-Map> groupClusterState = new HashMap<>();
-Map>> groupClusterLoad = 
new HashMap<>();
+Map> groupClusterState = new HashMap<>();
+Map>> groupClusterLoad = 
new HashMap<>();
 for (Address sName : info.getServers()) {
   for(ServerName curr: clusterState.keySet()) {
 if(curr.getAddress().equals(sName)) {
@@ -154,15 +154,15 @@ public class RSGroupBasedLoadBalancer implements 
RSGroupableBalancer {
   }
 
   @Override
-  public Map> roundRobinAssignment(
-  List regions, List servers) throws 
HBaseIOException {
-Map> assignments = Maps.newHashMap();
-ListMultimap regionMap = ArrayListMultimap.create();
+  public Map> roundRobinAssignment(
+  List regions, List servers) throws 
HBaseIOException {
+Map> assignments = Maps.newHashMap();
+ListMultimap regionMap = ArrayListMultimap.create();
 ListMultimap serverMap = ArrayListMultimap.create();
 generateGroupMaps(regions, servers, regionMap, 

[27/60] [abbrv] hbase git commit: HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()

2017-10-05 Thread busbey
HBASE-18904 Missing break in NEXT_ROW case of 
FilterList#mergeReturnCodeForOrOperator()

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3bd824fa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3bd824fa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3bd824fa

Branch: refs/heads/HBASE-18467
Commit: 3bd824facadd40dabb06e19ccad911be0ea6915a
Parents: b0e1a15
Author: Biju Nair 
Authored: Fri Sep 29 16:55:54 2017 -0400
Committer: tedyu 
Committed: Fri Sep 29 15:31:10 2017 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3bd824fa/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 7f2405d..033ca83 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,6 +566,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
 return ReturnCode.NEXT_ROW;
   }
+  break;
 case SEEK_NEXT_USING_HINT:
   if (isInReturnCodes(rc, ReturnCode.INCLUDE, 
ReturnCode.INCLUDE_AND_NEXT_COL,
 ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -577,6 +578,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
 return ReturnCode.SEEK_NEXT_USING_HINT;
   }
+  break;
 }
 throw new IllegalStateException(
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);



[25/60] [abbrv] hbase git commit: HBASE-18883 Update Curator to 4.0

2017-10-05 Thread busbey
HBASE-18883 Update Curator to 4.0

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4136ab33
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4136ab33
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4136ab33

Branch: refs/heads/HBASE-18467
Commit: 4136ab338d5691166e7e8c44f26aad6d273283d3
Parents: 239e687
Author: Mike Drob 
Authored: Tue Sep 26 15:00:49 2017 -0500
Committer: Mike Drob 
Committed: Fri Sep 29 10:39:57 2017 -0500

--
 pom.xml | 17 +++--
 1 file changed, 11 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4136ab33/pom.xml
--
diff --git a/pom.xml b/pom.xml
index a684f3c..13be3ab 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1436,7 +1436,7 @@
 1.46
 1.0.0-RC2
 1.0.0
-2.12.0
+4.0.0
 
 0.12
 1.5.5
@@ -2066,13 +2066,14 @@
   
   
 org.apache.curator
-curator-recipes
-${curator.version}
-  
-  
-org.apache.curator
 curator-framework
 ${curator.version}
+
+  
+org.apache.zookeeper
+zookeeper
+  
+
   
   
 org.apache.curator
@@ -2083,6 +2084,10 @@
 com.google.guava
 guava
   
+  
+org.apache.zookeeper
+zookeeper
+  
 
   
   



[55/60] [abbrv] hbase git commit: HBASE-18927 Add the DataType which is subset of KeyValue#Type to CellBuilder for building cell

2017-10-05 Thread busbey
HBASE-18927 Add the DataType which is subset of KeyValue#Type to CellBuilder 
for building cell


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/11aa6742
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/11aa6742
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/11aa6742

Branch: refs/heads/HBASE-18467
Commit: 11aa6742f0aa44d6084f95810c675aa387e9ee86
Parents: 2f25132
Author: Chia-Ping Tsai 
Authored: Wed Oct 4 01:57:08 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Oct 4 23:45:43 2017 +0800

--
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  8 +++-
 .../hbase/shaded/protobuf/ProtobufUtil.java |  6 +++---
 .../hbase/shaded/protobuf/TestProtobufUtil.java |  4 ++--
 .../org/apache/hadoop/hbase/CellBuilder.java| 14 -
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  6 +++---
 .../hadoop/hbase/ExtendedCellBuilder.java   |  2 ++
 .../hadoop/hbase/ExtendedCellBuilderImpl.java   | 21 ++--
 .../hbase/IndividualBytesFieldCellBuilder.java  |  2 +-
 .../apache/hadoop/hbase/KeyValueBuilder.java|  2 +-
 .../apache/hadoop/hbase/TestCellBuilder.java|  4 ++--
 .../apache/hadoop/hbase/types/TestPBCell.java   |  4 ++--
 .../hbase/replication/BulkLoadCellFilter.java   | 10 +-
 .../hadoop/hbase/protobuf/TestProtobufUtil.java |  4 ++--
 .../hbase/regionserver/MockHStoreFile.java  |  7 ---
 .../hadoop/hbase/regionserver/TestHStore.java   |  9 +
 15 files changed, 67 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/11aa6742/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index b886e02..8950311 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -31,8 +31,6 @@ import java.util.function.Function;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilder;
-import org.apache.hadoop.hbase.CellBuilderFactory;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
@@ -1328,7 +1326,7 @@ public final class ProtobufUtil {
 }
 
 List cells = new ArrayList<>(values.size());
-CellBuilder builder = 
CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
+ExtendedCellBuilder builder = 
ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
 for (CellProtos.Cell c : values) {
   cells.add(toCell(builder, c));
 }
@@ -1371,7 +1369,7 @@ public final class ProtobufUtil {
 
 if (!values.isEmpty()){
   if (cells == null) cells = new ArrayList<>(values.size());
-  CellBuilder builder = 
CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
+  ExtendedCellBuilder builder = 
ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
   for (CellProtos.Cell c: values) {
 cells.add(toCell(builder, c));
   }
@@ -1632,7 +1630,7 @@ public final class ProtobufUtil {
 return kvbuilder.build();
   }
 
-  public static Cell toCell(CellBuilder cellBuilder, final CellProtos.Cell 
cell) {
+  public static Cell toCell(ExtendedCellBuilder cellBuilder, final 
CellProtos.Cell cell) {
 return cellBuilder.clear()
 .setRow(cell.getRow().toByteArray())
 .setFamily(cell.getFamily().toByteArray())

http://git-wip-us.apache.org/repos/asf/hbase/blob/11aa6742/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 776aff2..e566704 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -1493,7 +1493,7 @@ public final class ProtobufUtil {
 }
 
 List cells = new ArrayList<>(values.size());
-CellBuilder builder = 
CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
+ExtendedCellBuilder builder = 
ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
 for (CellProtos.Cell c : values) {
   cells.add(toCell(builder, c));
 }
@@ -1536,7 +1536,7 @@ public final class ProtobufUtil {
 
 if (!values.isEmpty()){
   if (cell

[14/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
index d23cf7d..afd402b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
@@ -40,31 +40,32 @@ import org.apache.commons.cli.ParseException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hbase.ClusterStatus.Option;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.ClusterStatus.Option;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
 import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.MunkresAssignment;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
 
 /**
  * A tool that is used for manipulating and viewing favored nodes information
@@ -197,12 +198,12 @@ public class RegionPlacementMaintainer {
   Map> regionLocalityMap, FavoredNodesPlan plan,
   boolean munkresForSecondaryAndTertiary) throws IOException {
   // Get the all the regions for the current table
-  List regions =
+  List regions =
 assignmentSnapshot.getTableToRegionMap().get(tableName);
   int numRegions = regions.size();
 
   // Get the current assignment map
-  Map currentAssignmentMap =
+  Map currentAssignmentMap =
 assignmentSnapshot.getRegionToRegionServerMap();
 
   // Get the all the region servers
@@ -257,12 +258,12 @@ public class RegionPlacementMaintainer {
 // Compute the total rack locality for each region in each rack. The 
total
 // rack locality is the sum of the localities of a region on all 
servers in
 // a rack.
-Map> rackRegionLocality = new 
HashMap<>();
+Map> rackRegionLocality = new 
HashMap<>();
 for (int i = 0; i < numRegions; i++) {
-  HRegionInfo region = regions.get(i);
+  RegionInfo region = regions.get(i);
   for (int j = 0; j < regionSlots; j += slotsPerServer) {
 String rack = rackManager.getRack(servers.get(j / slotsPerServer));
-Map rackLocality = 
rackRegionLocality.get(rack);
+Map rackLocality = rackRegionLocality.get(rack);
 if (rackLocality == null) {
   rackLocality = new HashMap<>();
   rackRegionLocality.put(rack, rackLocality);
@@ -417,18 +418,18 @@ public class RegionPlacementMaintainer {
 LOG.info("Assignment plan for secondary and tertiary generated " +
 "using MunkresAssignment");
   } else {
-Map primaryRSMap = new HashMap<>();
+Map primaryRSMap = new HashMap<>();
 for (int i = 0; i < numRegions; i++) {
   primaryRSMap.put(regions.get(i), servers.get(primaryAssignment[i] / 
slotsPerServer));
 }
 FavoredNodeAssignmentHelper favoredNodeHelper =
 new FavoredNodeAssignmentHelper(servers, conf);
  

[19/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
HBASE-18839 Apply RegionInfo to code base


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a11a35a1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a11a35a1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a11a35a1

Branch: refs/heads/HBASE-18467
Commit: a11a35a1135c431ee12534451c925727165eded5
Parents: 7f4c3b3
Author: Chia-Ping Tsai 
Authored: Thu Sep 28 16:16:21 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Sep 28 20:19:29 2017 +0800

--
 .../hadoop/hbase/backup/util/BackupUtils.java   |  14 +-
 .../hadoop/hbase/AsyncMetaTableAccessor.java|  47 +--
 .../org/apache/hadoop/hbase/HRegionInfo.java|  19 +-
 .../apache/hadoop/hbase/HRegionLocation.java|  26 +-
 .../apache/hadoop/hbase/MetaTableAccessor.java  | 245 ++---
 .../apache/hadoop/hbase/RegionLocations.java|   5 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  15 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|   9 +-
 .../hbase/client/ConnectionImplementation.java  |  38 +-
 .../hbase/client/FlushRegionCallable.java   |  10 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  | 134 ---
 .../hadoop/hbase/client/HRegionLocator.java |   9 +-
 .../hadoop/hbase/client/HTableMultiplexer.java  |  15 +-
 .../hbase/client/ImmutableHRegionInfo.java  |   2 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  64 ++--
 .../hadoop/hbase/client/RawAsyncTable.java  |  25 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  18 +-
 .../client/RegionCoprocessorRpcChannelImpl.java |  22 +-
 .../apache/hadoop/hbase/client/RegionInfo.java  |  27 +-
 .../hadoop/hbase/client/RegionInfoBuilder.java  | 360 ++-
 .../hadoop/hbase/client/RegionReplicaUtil.java  |  65 ++--
 .../hadoop/hbase/client/ZooKeeperRegistry.java  |   7 +-
 .../apache/hadoop/hbase/master/RegionState.java |  23 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |  38 +-
 .../hbase/shaded/protobuf/RequestConverter.java |  48 +--
 .../shaded/protobuf/ResponseConverter.java  |  20 +-
 .../hbase/zookeeper/MetaTableLocator.java   |  71 ++--
 .../hadoop/hbase/client/TestAsyncProcess.java   |  28 +-
 .../coprocessor/AsyncAggregationClient.java |  28 +-
 .../apache/hadoop/hbase/coprocessor/Export.java |  22 +-
 .../example/TestRefreshHFilesEndpoint.java  |  20 +-
 .../mapreduce/TableSnapshotInputFormat.java |  26 +-
 .../hbase/regionserver/CompactionTool.java  |  10 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.java   |  23 +-
 .../hbase/mapreduce/TestImportExport.java   |  18 +-
 .../replication/TestReplicationSmallTests.java  |  25 +-
 .../hbase/snapshot/TestExportSnapshot.java  |  13 +-
 .../hbase/snapshot/TestMobExportSnapshot.java   |   4 +-
 .../hadoop/hbase/rest/RegionsResource.java  |  20 +-
 .../hbase/rsgroup/RSGroupAdminServer.java   |  43 +--
 .../hbase/rsgroup/RSGroupBasedLoadBalancer.java | 104 +++---
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   |  15 +-
 .../balancer/TestRSGroupBasedLoadBalancer.java  | 133 +++
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  |  22 +-
 .../hbase/tmpl/master/MasterStatusTmpl.jamon|   6 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.jamon  |   4 +-
 .../tmpl/regionserver/RegionListTmpl.jamon  |  41 +--
 .../hadoop/hbase/RegionStateListener.java   |  11 +-
 .../hadoop/hbase/backup/HFileArchiver.java  |  18 +-
 .../hbase/client/ClientSideRegionScanner.java   |   5 +-
 .../hbase/client/TableSnapshotScanner.java  |  13 +-
 .../hbase/client/locking/LockServiceClient.java |  18 +-
 .../SplitLogManagerCoordination.java|   8 +-
 .../ZKSplitLogManagerCoordination.java  |  16 +-
 .../coprocessor/MultiRowMutationEndpoint.java   |  16 +-
 .../favored/FavoredNodeAssignmentHelper.java| 104 +++---
 .../hbase/favored/FavoredNodeLoadBalancer.java  |  85 ++---
 .../hbase/favored/FavoredNodesManager.java  |  44 +--
 .../hadoop/hbase/favored/FavoredNodesPlan.java  |  10 +-
 .../hbase/favored/FavoredNodesPromoter.java |   8 +-
 .../org/apache/hadoop/hbase/io/HFileLink.java   |  19 +-
 .../hadoop/hbase/master/AssignmentListener.java |   9 +-
 .../master/AssignmentVerificationReport.java|  38 +-
 .../hadoop/hbase/master/CatalogJanitor.java |  56 ++-
 .../org/apache/hadoop/hbase/master/HMaster.java |  58 +--
 .../hadoop/hbase/master/LoadBalancer.java   |  32 +-
 .../hadoop/hbase/master/MasterFileSystem.java   |  13 +-
 .../hbase/master/MasterMetaBootstrap.java   |  15 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |  54 +--
 .../hadoop/hbase/master/MasterServices.java |  11 +-
 .../hadoop/hbase/master/MasterWalManager.java   |   6 +-
 .../hbase/master/RegionPlacementMaintainer.java |  67 ++--
 .../apache/hadoop/hbase/master/RegionPlan.java  |  10 +-
 .../hadoop/hbase/master/ServerManager.java  |  32 +-
 .../SnapshotOfRegionAssignmentFromM

[13/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index 9e37292..da6afc9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -30,15 +30,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MetaMutationAnnotation;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownRegionException;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.exceptions.MergeRegionException;
@@ -59,13 +59,16 @@ import 
org.apache.hadoop.hbase.quotas.QuotaExceededException;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+
 import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MergeTableRegionsState;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.FSUtils;
 
 /**
  * The procedure to Merge a region in a table.
@@ -79,8 +82,8 @@ public class MergeTableRegionsProcedure
   private Boolean traceEnabled;
   private volatile boolean lock = false;
   private ServerName regionLocation;
-  private HRegionInfo[] regionsToMerge;
-  private HRegionInfo mergedRegion;
+  private RegionInfo[] regionsToMerge;
+  private RegionInfo mergedRegion;
   private boolean forcible;
 
   public MergeTableRegionsProcedure() {
@@ -88,18 +91,18 @@ public class MergeTableRegionsProcedure
   }
 
   public MergeTableRegionsProcedure(final MasterProcedureEnv env,
-  final HRegionInfo regionToMergeA, final HRegionInfo regionToMergeB) 
throws IOException {
+  final RegionInfo regionToMergeA, final RegionInfo regionToMergeB) throws 
IOException {
 this(env, regionToMergeA, regionToMergeB, false);
   }
 
   public MergeTableRegionsProcedure(final MasterProcedureEnv env,
-  final HRegionInfo regionToMergeA, final HRegionInfo regionToMergeB,
+  final RegionInfo regionToMergeA, final RegionInfo regionToMergeB,
   final boolean forcible) throws MergeRegionException {
-this(env, new HRegionInfo[] {regionToMergeA, regionToMergeB}, forcible);
+this(env, new RegionInfo[] {regionToMergeA, regionToMergeB}, forcible);
   }
 
   public MergeTableRegionsProcedure(final MasterProcedureEnv env,
-  final HRegionInfo[] regionsToMerge, final boolean forcible)
+  final RegionInfo[] regionsToMerge, final boolean forcible)
   throws MergeRegionException {
 super(env);
 
@@ -117,7 +120,7 @@ public class MergeTableRegionsProcedure
 this.forcible = forcible;
   }
 
-  private static void checkRegionsToMerge(final HRegionInfo[] regionsToMerge,
+  private static void checkRegionsToMerge(final RegionInfo[] regionsToMerge,
   final boolean forcible) throws MergeRegionException {
 // For now, we only merge 2 regions.
 // It could be extended to more than 2 regions in the future.
@@ -129,19 +132,19 @@ public class MergeTableRegionsProcedure
 checkRegionsToMerge(regionsToMerge[0], regionsToMerge[1], forcible);
   }
 
-  private static void checkRegionsToMerge(final HRegionInfo regionToMergeA,
-  final HRegionInfo regionToMergeB, final boolean forcible) throws 
MergeRegionException {
+  private static void checkRegionsT

[32/60] [abbrv] hbase git commit: Revert "HBASE-18814 Improve TableSnapshotInputFormat to allow more multiple mappers per region" due to wrong jira id.

2017-10-05 Thread busbey
Revert "HBASE-18814 Improve TableSnapshotInputFormat to allow more multiple 
mappers per region" due to wrong jira id.

This reverts commit f20580a53083b69eec3d766cf2a1f99d0bff9747.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/367dfabf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/367dfabf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/367dfabf

Branch: refs/heads/HBASE-18467
Commit: 367dfabf0694a5c72dbd5c30092a1ff9e30fca5c
Parents: cacf3f5
Author: Ashu Pachauri 
Authored: Sat Sep 30 01:43:10 2017 -0700
Committer: Ashu Pachauri 
Committed: Sat Sep 30 01:44:39 2017 -0700

--
 ...IntegrationTestTableSnapshotInputFormat.java |   4 +-
 .../hadoop/hbase/mapred/TableMapReduceUtil.java |  38 --
 .../hbase/mapred/TableSnapshotInputFormat.java  |  18 ---
 .../hbase/mapreduce/TableMapReduceUtil.java |  38 --
 .../mapreduce/TableSnapshotInputFormat.java |  24 +---
 .../mapreduce/TableSnapshotInputFormatImpl.java | 115 +++
 .../mapred/TestTableSnapshotInputFormat.java|  41 +++
 .../TableSnapshotInputFormatTestBase.java   |  23 ++--
 .../mapreduce/TestTableSnapshotInputFormat.java |  41 +++
 .../hbase/client/ClientSideRegionScanner.java   |   2 -
 .../hadoop/hbase/util/RegionSplitter.java   |  71 
 .../hadoop/hbase/util/TestRegionSplitter.java   |  20 
 12 files changed, 52 insertions(+), 383 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/367dfabf/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
index 2df1c4b..1a152e8 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
@@ -151,7 +151,7 @@ public class IntegrationTestTableSnapshotInputFormat 
extends IntegrationTestBase
   int expectedNumSplits = numRegions > 2 ? numRegions - 2 : numRegions;
 
   
org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat.doTestWithMapReduce(util,
-tableName, snapshotName, START_ROW, END_ROW, tableDir, numRegions, 1,
+tableName, snapshotName, START_ROW, END_ROW, tableDir, numRegions,
 expectedNumSplits, false);
 } else if (mr.equalsIgnoreCase(MAPRED_IMPLEMENTATION)) {
   /*
@@ -165,7 +165,7 @@ public class IntegrationTestTableSnapshotInputFormat 
extends IntegrationTestBase
   int expectedNumSplits = numRegions;
 
   
org.apache.hadoop.hbase.mapred.TestTableSnapshotInputFormat.doTestWithMapReduce(util,
-tableName, snapshotName, MAPRED_START_ROW, MAPRED_END_ROW, tableDir, 
numRegions, 1,
+tableName, snapshotName, MAPRED_START_ROW, MAPRED_END_ROW, tableDir, 
numRegions,
 expectedNumSplits, false);
 } else {
   throw new IllegalArgumentException("Unrecognized mapreduce 
implementation: " + mr +".");

http://git-wip-us.apache.org/repos/asf/hbase/blob/367dfabf/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
index 0427f50..35dbf02 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
-import org.apache.hadoop.hbase.util.RegionSplitter;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
@@ -187,43 +186,6 @@ public class TableMapReduceUtil {
   }
 
   /**
-   * Sets up the job for reading from a table snapshot. It bypasses hbase 
servers
-   * and read directly from snapshot files.
-   *
-   * @param snapshotName The name of the snapshot (of a table) to read from.
-   * @param columns  The columns to scan.
-   * @param mapper  The mapper class to use.
-   * @param outputKeyClass  The class of the output key.
-   * @param outputValueClass  The class of the output value.
-   * @param jobConf  The

[60/60] [abbrv] hbase git commit: HBASE-18467 report nightly results to devs via jira

2017-10-05 Thread busbey
HBASE-18467 report nightly results to devs via jira


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a47592ef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a47592ef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a47592ef

Branch: refs/heads/HBASE-18467
Commit: a47592effd07a66aee08c863aeada79cb5a19962
Parents: bafbade
Author: Sean Busbey 
Authored: Wed Aug 9 00:48:46 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 23:13:34 2017 -0500

--
 dev-support/Jenkinsfile | 133 +--
 1 file changed, 127 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a47592ef/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 1f01a47..ad47608 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -114,6 +114,13 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 stash name: 'yetus', includes: 
"yetus-*/*,yetus-*/**/*,tools/personality.sh"
   }
 }
+stage ('cleanup') {
+  steps {
+sh '''#!/usr/bin/env bash
+  rm -rf "${OUTPUTDIR_GENERAL}/success" "${OUTPUTDIR_GENERAL}/failure" 
"${OUTPUTDIR_JDK7}/success" "${OUTPUTDIR_JDK7}/failure" 
"${OUTPUTDIR_JDK8}/success" "${OUTPUTDIR_JDK8}/failure" 
"${WORKSPACE}/src_tarball_success" "${WORKSPACE}/src_tarball_failure"
+'''
+  }
+}
 stage ('yetus general check') {
   environment {
 // TODO does hadoopcheck need to be jdk specific?
@@ -128,7 +135,17 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   steps {
 unstash 'yetus'
 // TODO should this be a download from master, similar to how the 
personality is?
-sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+sh '''#!/usr/bin/env bash
+  declare commentfile
+  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+commentfile="${OUTPUTDIR}/success"
+echo '(/) {color:green}+1 general checks{color}' >> 
"${commentfile}"
+  else
+commentfile="${OUTPUTDIR}/failure"
+echo '(x) {color:red}-1 general checks{color}' >> "${commentfile}"
+  fi
+  echo "-- For more information [see general 
report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${commentfile}"
+'''
   }
   post {
 always {
@@ -159,13 +176,21 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   }
   steps {
 unstash 'yetus'
-sh """#!/usr/bin/env bash
+sh '''#!/usr/bin/env bash
   # for branch-1.1 we don't do jdk8 findbugs, so do it here
-  if [ "${env.BRANCH_NAME}" == "branch-1.1" ]; then
+  if [ "${BRANCH_NAME}" == "branch-1.1" ]; then
 TESTS+=",findbugs"
   fi
-  "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
-"""
+  declare commentfile
+  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+commentfile="${OUTPUTDIR}/success"
+echo '(/) {color:green}+1 jdk7 checks{color}' >> "${commentfile}"
+  else
+commentfile="${OUTPUTDIR}/failure"
+echo '(x) {color:red}-1 jdk7 checks{color}' >> "${commentfile}"
+  fi
+  echo "-- For more information [see jdk7 
report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${commentfile}"
+'''
   }
   post {
 always {
@@ -215,7 +240,17 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   }
   steps {
 unstash 'yetus'
-sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+sh '''#!/usr/bin/env bash
+  declare commentfile
+  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+commentfile="${OUTPUTDIR}/success"
+echo '(/) {color:green}+1 jdk8 checks{color}' >> "${commentfile}"
+  else
+commentfile="${OUTPUTDIR}/failure"
+echo '(x) {color:red}-1 jdk8 checks{color}' >> "${commentfile}"
+  fi
+  echo "-- For more information [see jdk8 
report|${BUILD_URL}/JDK8_Nightly_Build_Report/]" >> "${commentfile}"
+'''
   }
   post {
 always {
@@ -304,6 +339,92 @@ END
   fi
 '''
   }
+  // This approach only works because the source release artifact is the 
last stage that does work.
+  post {
+success {
+  writeFile file: "${env.WORKSPACE}/src_tarball_success", text: '(/) 
{color:green}+1 source release artifact{color}\n-- See build output for 
details.'
+}
+failure {
+  writeFile file: "${env.WORKSPACE}/src_tarball_failure", text: '(x) 
{color:red}-1 source release artifact{color}\

[17/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index e5f1848..2fbbc3f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -30,11 +30,9 @@ import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.ClusterStatus.Option;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Action;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
@@ -46,6 +44,7 @@ import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
@@ -54,6 +53,12 @@ import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.security.token.Token;
+import org.apache.yetus.audience.InterfaceAudience;
+
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
@@ -70,7 +75,6 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavor
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
@@ -81,6 +85,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationPr
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
@@ -97,12 +102,9 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTabl
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetN

[28/60] [abbrv] hbase git commit: HBASE-18436 Add client-side hedged read metrics (Yun Zhao)

2017-10-05 Thread busbey
HBASE-18436 Add client-side hedged read metrics (Yun Zhao)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca87d05a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca87d05a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca87d05a

Branch: refs/heads/HBASE-18467
Commit: ca87d05a518338e64099f42c229d557b93ce51c8
Parents: 3bd824f
Author: Andrew Purtell 
Authored: Fri Sep 29 14:02:49 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Sep 29 15:37:04 2017 -0700

--
 .../hadoop/hbase/client/MetricsConnection.java  |  14 +++
 .../RpcRetryingCallerWithReadReplicas.java  |  11 +-
 .../hadoop/hbase/client/TestReplicasClient.java | 126 ---
 .../TestSplitTransactionOnCluster.java  |   6 +-
 4 files changed, 132 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca87d05a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
index 31612f3..c54729b 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
@@ -269,6 +269,8 @@ public class MetricsConnection implements 
StatisticTrackable {
   @VisibleForTesting protected final RunnerStats runnerStats;
   @VisibleForTesting protected final Counter metaCacheNumClearServer;
   @VisibleForTesting protected final Counter metaCacheNumClearRegion;
+  @VisibleForTesting protected final Counter hedgedReadOps;
+  @VisibleForTesting protected final Counter hedgedReadWin;
 
   // dynamic metrics
 
@@ -315,6 +317,8 @@ public class MetricsConnection implements 
StatisticTrackable {
   "metaCacheNumClearServer", scope));
 this.metaCacheNumClearRegion = registry.counter(name(this.getClass(),
   "metaCacheNumClearRegion", scope));
+this.hedgedReadOps = registry.counter(name(this.getClass(), 
"hedgedReadOps", scope));
+this.hedgedReadWin = registry.counter(name(this.getClass(), 
"hedgedReadWin", scope));
 this.getTracker = new CallTracker(this.registry, "Get", scope);
 this.scanTracker = new CallTracker(this.registry, "Scan", scope);
 this.appendTracker = new CallTracker(this.registry, "Mutate", "Append", 
scope);
@@ -373,6 +377,16 @@ public class MetricsConnection implements 
StatisticTrackable {
 metaCacheNumClearRegion.inc();
   }
 
+  /** Increment the number of hedged read that have occurred. */
+  public void incrHedgedReadOps() {
+hedgedReadOps.inc();
+  }
+
+  /** Increment the number of hedged read returned faster than the original 
read. */
+  public void incrHedgedReadWin() {
+hedgedReadWin.inc();
+  }
+
   /** Increment the number of normal runner counts. */
   public void incrNormalRunners() {
 this.runnerStats.incrNormalRunners();

http://git-wip-us.apache.org/repos/asf/hbase/blob/ca87d05a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
index e7a4ba6..c6ba228 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
@@ -216,6 +216,9 @@ public class RpcRetryingCallerWithReadReplicas {
   if (f != null) {
 return f.get(); //great we got a response
   }
+  if (cConnection.getConnectionMetrics() != null) {
+cConnection.getConnectionMetrics().incrHedgedReadOps();
+  }
 } catch (ExecutionException e) {
   // We ignore the ExecutionException and continue with the secondary 
replicas
   if (LOG.isDebugEnabled()) {
@@ -238,13 +241,17 @@ public class RpcRetryingCallerWithReadReplicas {
   addCallsForReplica(cs, rl, 1, rl.size() - 1);
 }
 try {
-  Future f = 
cs.pollForFirstSuccessfullyCompletedTask(operationTimeout,
-  TimeUnit.MILLISECONDS, startIndex, endIndex);
+  ResultBoundedCompletionService.QueueingFuture f =
+  cs.pollForFirstSuccessfullyCompletedTask(operationTimeout, 
TimeUnit.MILLISECONDS, startIndex, endIndex);
   if (f == null) {
 throw new RetriesExhaustedException("Timed out after " + 
operationTimeout +
 "ms. Get is sent to repl

[53/60] [abbrv] hbase git commit: HBASE-18649 Deprecate KV Usage in MR to move to Cells in 3.0 (ram)

2017-10-05 Thread busbey
HBASE-18649 Deprecate KV Usage in MR to move to Cells in 3.0 (ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0a24178d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0a24178d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0a24178d

Branch: refs/heads/HBASE-18467
Commit: 0a24178d06ac3dd5780522d52b5054e7aff1e50f
Parents: 16d483f
Author: Ramkrishna 
Authored: Wed Oct 4 16:00:28 2017 +0530
Committer: Ramkrishna 
Committed: Wed Oct 4 16:00:28 2017 +0530

--
 .../mapreduce/MapReduceHFileSplitterJob.java|  33 +--
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  77 +-
 .../hbase/io/encoding/FastDiffDeltaEncoder.java |   3 +-
 .../io/encoding/PrefixKeyDeltaEncoder.java  |   3 +-
 .../hadoop/hbase/util/ByteBufferUtils.java  |  23 ++
 .../org/apache/hadoop/hbase/TestCellUtil.java   | 117 
 .../hbase/mapreduce/CellSerialization.java  |  93 +++
 .../hadoop/hbase/mapreduce/CellSortReducer.java |  60 +
 .../hadoop/hbase/mapreduce/CopyTable.java   |   2 +-
 .../hbase/mapreduce/HFileOutputFormat2.java |  26 +-
 .../apache/hadoop/hbase/mapreduce/Import.java   | 128 -
 .../hadoop/hbase/mapreduce/ImportTsv.java   |   2 +-
 .../hbase/mapreduce/KeyValueSerialization.java  |  88 --
 .../hbase/mapreduce/KeyValueSortReducer.java|  57 
 .../hadoop/hbase/mapreduce/PutSortReducer.java  |   2 +-
 .../hbase/mapreduce/TableMapReduceUtil.java |   2 +-
 .../hadoop/hbase/mapreduce/TextSortReducer.java |   2 +-
 .../hadoop/hbase/mapreduce/WALPlayer.java   |  21 +-
 .../apache/hadoop/hbase/util/MapReduceCell.java | 270 +++
 .../hbase/mapreduce/TestHFileOutputFormat2.java |   6 +-
 .../hbase/mapreduce/TestImportExport.java   |   9 +-
 .../hadoop/hbase/mapreduce/TestWALPlayer.java   |   7 +-
 22 files changed, 757 insertions(+), 274 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0a24178d/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
index 97ece3d..51a6b1d 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
@@ -24,22 +24,21 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.CellSortReducer;
 import org.apache.hadoop.hbase.mapreduce.HFileInputFormat;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
-import org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.MapReduceCell;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
@@ -47,6 +46,7 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A tool to split HFiles into new region boundaries as a MapReduce job. The 
tool generates HFiles
@@ -70,24 +70,15 @@ public class MapReduceHFileSplitterJob extends Configured 
implements Tool {
 
   /**
* A mapper that just writes out cells. This one can be used together with
-   * {@link KeyValueSortReducer}
+   * {@link CellSortReducer}
*/
   static class HFileCellMapper extends
-  Mapper {
+  Mapper {
 
 @Override
-public void map(NullWritable key, KeyValue value, Context context) throws 
IOException,
-InterruptedException {
-  // Convert value to KeyValue if subclass
-  if (!v

[50/60] [abbrv] hbase git commit: HBASE-18902 TestCoprocessorServiceBackwardCompatibility fails

2017-10-05 Thread busbey
HBASE-18902 TestCoprocessorServiceBackwardCompatibility fails


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c205b5b5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c205b5b5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c205b5b5

Branch: refs/heads/HBASE-18467
Commit: c205b5b5ed873a2c503915aa70267ef2800f4d20
Parents: 2ad7be2
Author: tedyu 
Authored: Tue Oct 3 13:48:46 2017 -0700
Committer: tedyu 
Committed: Tue Oct 3 13:48:46 2017 -0700

--
 .../protobuf/DummyRegionServerEndpoint.proto|  1 +
 ...CoprocessorServiceBackwardCompatibility.java | 64 +++-
 2 files changed, 36 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c205b5b5/hbase-endpoint/src/main/protobuf/DummyRegionServerEndpoint.proto
--
diff --git a/hbase-endpoint/src/main/protobuf/DummyRegionServerEndpoint.proto 
b/hbase-endpoint/src/main/protobuf/DummyRegionServerEndpoint.proto
index 539f7da..8894718 100644
--- a/hbase-endpoint/src/main/protobuf/DummyRegionServerEndpoint.proto
+++ b/hbase-endpoint/src/main/protobuf/DummyRegionServerEndpoint.proto
@@ -25,6 +25,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 
 message DummyRequest {
+  optional uint64 value = 1;
 }
 
 message DummyResponse {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c205b5b5/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorServiceBackwardCompatibility.java
--
diff --git 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorServiceBackwardCompatibility.java
 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorServiceBackwardCompatibility.java
index c2ff36e..c51da81 100644
--- 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorServiceBackwardCompatibility.java
+++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorServiceBackwardCompatibility.java
@@ -43,9 +43,19 @@ public class TestCoprocessorServiceBackwardCompatibility {
   private static HBaseTestingUtility TEST_UTIL = null;
   private static Configuration CONF = null;
 
+  private static final long MASTER = 1;
+  private static final long REGIONSERVER = 2;
+  private static final long REGION = 3;
+
   public static class DummyCoprocessorService extends DummyService
   implements CoprocessorService, SingletonCoprocessorService {
-static int numCalls = 0;
+// depending on the value passed thru DummyRequest, the following fields 
would be incremented
+// value == MASTER
+static int numMaster = 0;
+// value == REGIONSERVER
+static int numRegionServer = 0;
+// value == REGION
+static int numRegion = 0;
 
 @Override
 public Service getService() {
@@ -56,7 +66,13 @@ public class TestCoprocessorServiceBackwardCompatibility {
 public void dummyCall(RpcController controller, DummyRequest request,
 RpcCallback callback) {
   callback.run(DummyResponse.newBuilder().setValue("").build());
-  numCalls++;
+  if (request.getValue() == MASTER) {
+numMaster += request.getValue();
+  } else if (request.getValue() == REGIONSERVER) {
+numRegionServer += request.getValue();
+  } else if (request.getValue() == REGION) {
+numRegion += request.getValue();
+  }
 }
 
 @Override
@@ -69,48 +85,38 @@ public class TestCoprocessorServiceBackwardCompatibility {
   public static void setupBeforeClass() throws Exception {
 TEST_UTIL = new HBaseTestingUtility();
 CONF = TEST_UTIL.getConfiguration();
-DummyCoprocessorService.numCalls = 0;
+CONF.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
+DummyCoprocessorService.class.getName());
+CONF.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
+DummyCoprocessorService.class.getName());
+CONF.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+DummyCoprocessorService.class.getName());
+TEST_UTIL.startMiniCluster();
   }
 
   @AfterClass
-  public static void tearDownAfterClass() throws Exception {
+  public static void tearDownAfter() throws Exception {
 TEST_UTIL.shutdownMiniCluster();
   }
 
   @Test
-  public void testCoprocessorServiceLoadedByMaster() throws Exception {
-CONF.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
-DummyCoprocessorService.class.getName());
-TEST_UTIL.startMiniCluster();
-
+  public void testCoprocessorServiceLoadedByMaster() throws Throwable {
 TEST_UTIL.getAdmin().coprocessorService().callBlockingMethod(
 
DummyCoprocessorService.getDescriptor().fi

[26/60] [abbrv] hbase git commit: Update Misty's timezone

2017-10-05 Thread busbey
Update Misty's timezone

Signed-off-by: Misty Stanley-Jones 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b0e1a150
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b0e1a150
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b0e1a150

Branch: refs/heads/HBASE-18467
Commit: b0e1a150928a5eb5dca2f933dc55ac206977f3ca
Parents: 4136ab3
Author: Misty Stanley-Jones 
Authored: Fri Sep 29 10:37:45 2017 -0700
Committer: Misty Stanley-Jones 
Committed: Fri Sep 29 10:37:45 2017 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b0e1a150/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 13be3ab..272e4d4 100755
--- a/pom.xml
+++ b/pom.xml
@@ -431,7 +431,7 @@
   misty
   Misty Stanley-Jones
   mi...@apache.org
-  +10
+  -8
 
 
   ndimiduk



[20/60] [abbrv] hbase git commit: HBASE-18887 After full backup passed on hdfs root and incremental failed, full backup cannot be cleaned (Vladimir Rodionov)

2017-10-05 Thread busbey
HBASE-18887 After full backup passed on hdfs root and incremental failed, full 
backup cannot be cleaned (Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca295982
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca295982
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca295982

Branch: refs/heads/HBASE-18467
Commit: ca2959824ddf206143b819ae85bb29b6b60faa3b
Parents: a11a35a
Author: tedyu 
Authored: Thu Sep 28 10:20:02 2017 -0700
Committer: tedyu 
Committed: Thu Sep 28 10:20:02 2017 -0700

--
 .../hadoop/hbase/backup/impl/BackupCommands.java  | 18 +++---
 .../hadoop/hbase/backup/TestBackupBase.java   | 12 
 .../hbase/backup/TestBackupCommandLineTool.java   | 12 +++-
 3 files changed, 34 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca295982/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
index 2dfd46e..194d350 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
@@ -58,11 +58,11 @@ import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.util.BackupSet;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * General backup commands, options and usage messages
@@ -73,6 +73,9 @@ public final class BackupCommands {
 
   public final static String INCORRECT_USAGE = "Incorrect usage";
 
+  public final static String TOP_LEVEL_NOT_ALLOWED =
+  "Top level (root) folder is not allowed to be a backup destination";
+
   public static final String USAGE = "Usage: hbase backup COMMAND 
[command-specific arguments]\n"
   + "where COMMAND is one of:\n" + "  create create a new backup 
image\n"
   + "  delete delete an existing backup image\n"
@@ -283,7 +286,11 @@ public final class BackupCommands {
 printUsage();
 throw new IOException(INCORRECT_USAGE);
   }
-
+  String targetBackupDir = args[2];
+  // Check if backup destination is top level (root) folder - not allowed
+  if (isRootFolder(targetBackupDir)) {
+throw new IOException(TOP_LEVEL_NOT_ALLOWED);
+  }
   String tables = null;
 
   // Check if we have both: backup set and list of tables
@@ -331,7 +338,7 @@ public final class BackupCommands {
 .withBackupType(BackupType.valueOf(args[1].toUpperCase()))
 .withTableList(
   tables != null ? 
Lists.newArrayList(BackupUtils.parseTableNames(tables)) : null)
-.withTargetRootDir(args[2]).withTotalTasks(workers)
+.withTargetRootDir(targetBackupDir).withTotalTasks(workers)
 
.withBandwidthPerTasks(bandwidth).withBackupSetName(setName).build();
 String backupId = admin.backupTables(request);
 System.out.println("Backup session " + backupId + " finished. Status: 
SUCCESS");
@@ -341,6 +348,11 @@ public final class BackupCommands {
   }
 }
 
+private boolean isRootFolder(String targetBackupDir) {
+  Path p = new Path(targetBackupDir);
+  return p.isRoot();
+}
+
 private boolean verifyPath(String path) {
   try {
 Path p = new Path(path);

http://git-wip-us.apache.org/repos/asf/hbase/blob/ca295982/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
--
diff --git 
a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java 
b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
index 8752ca2..69db342 100644
--- 
a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
+++ 
b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
@@ -98,8 +98,8 @@ public class TestBackupBase {
   protected static final byte[] qualName = Bytes.toBytes("q1");
   protected static final byte[] famName = Bytes.toBytes("f");
 
-  protected static String BACKUP_ROOT

[49/60] [abbrv] hbase git commit: HBASE-18606 Tests in hbase-spark module fail with UnsatisfiedLinkError

2017-10-05 Thread busbey
HBASE-18606 Tests in hbase-spark module fail with UnsatisfiedLinkError


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2ad7be24
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2ad7be24
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2ad7be24

Branch: refs/heads/HBASE-18467
Commit: 2ad7be24a93ad47bc92fd4e3d939a0ce7c3e5a22
Parents: 1ff90d3
Author: Michael Stack 
Authored: Tue Oct 3 10:53:32 2017 -0700
Committer: Michael Stack 
Committed: Tue Oct 3 12:13:24 2017 -0700

--
 hbase-spark/pom.xml | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2ad7be24/hbase-spark/pom.xml
--
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
index ce7313a..320104b 100644
--- a/hbase-spark/pom.xml
+++ b/hbase-spark/pom.xml
@@ -568,6 +568,9 @@
   .
   WDF TestSuite.txt
   false
+  
+
org.apache.hadoop.hbase.shaded.
+  
 
 
   



[22/60] [abbrv] hbase git commit: HBASE-18884 Coprocessor Design Improvements follow up of HBASE-17732; MINOR AMENDMENT adding README to design-doc dir

2017-10-05 Thread busbey
HBASE-18884 Coprocessor Design Improvements follow up of HBASE-17732; MINOR 
AMENDMENT adding README to design-doc dir


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b5b4108f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b5b4108f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b5b4108f

Branch: refs/heads/HBASE-18467
Commit: b5b4108fcedeea4e25e86f426e313ffa51229d10
Parents: 74d0adc
Author: Michael Stack 
Authored: Thu Sep 28 13:24:40 2017 -0700
Committer: Michael Stack 
Committed: Thu Sep 28 13:24:40 2017 -0700

--
 dev-support/design-docs/README.txt| 10 ++
 .../main/java/org/apache/hadoop/hbase/Coprocessor.java|  2 +-
 2 files changed, 11 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b4108f/dev-support/design-docs/README.txt
--
diff --git a/dev-support/design-docs/README.txt 
b/dev-support/design-docs/README.txt
new file mode 100644
index 000..be57e8b
--- /dev/null
+++ b/dev-support/design-docs/README.txt
@@ -0,0 +1,10 @@
+This directory hosts design docs and proposals. Add here final
+or near-final writeups so they are easy to find and part of
+the code base.
+
+Be warned that final delivery may not be a perfect reflection
+of what is captured at design time; implementation bends as
+it encounters hurdles not conceptualized at design-time.
+
+The effort at capturing all design in a single directory rather than spread
+about JIRA as attachments was begun in September of 2017.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b4108f/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
index c4003ae..422e064 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
@@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceStability;
  * the above mentioned 4 coprocessors, it'll fail to be loaded by any 
coprocessor host.
  *
  * Example:
- * Building a coprocessor to observer Master operations.
+ * Building a coprocessor to observe Master operations.
  * 
  * class MyMasterCoprocessor implements MasterCoprocessor {
  *   @Override



[35/60] [abbrv] hbase git commit: HBASE-18753 Introduce the unsynchronized TimeRangeTracker

2017-10-05 Thread busbey
HBASE-18753 Introduce the unsynchronized TimeRangeTracker


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9f1bfbea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9f1bfbea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9f1bfbea

Branch: refs/heads/HBASE-18467
Commit: 9f1bfbeaab7b8eb4bb9cfbc6ccab959b025fc7cd
Parents: dd3d7de
Author: Chia-Ping Tsai 
Authored: Sun Oct 1 16:59:35 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sun Oct 1 21:45:03 2017 +0800

--
 .../org/apache/hadoop/hbase/util/ClassSize.java |   9 +-
 .../hbase/mapreduce/TestHFileOutputFormat2.java |   2 +-
 .../regionserver/CompositeImmutableSegment.java |  17 +-
 .../hadoop/hbase/regionserver/Segment.java  |   8 +-
 .../hbase/regionserver/StoreFileWriter.java |   2 +-
 .../hbase/regionserver/TimeRangeTracker.java| 213 +++---
 .../apache/hadoop/hbase/io/TestHeapSize.java|  74 +++--
 .../AbstractTestDateTieredCompactionPolicy.java |   2 +-
 .../TestDefaultCompactSelection.java|   2 +-
 .../TestSimpleTimeRangeTracker.java | 131 +
 .../regionserver/TestSyncTimeRangeTracker.java  | 164 +++
 .../regionserver/TestTimeRangeTracker.java  | 276 ---
 12 files changed, 529 insertions(+), 371 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9f1bfbea/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index d63b1cf..a4d5792 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -20,7 +20,6 @@
 
 package org.apache.hadoop.hbase.util;
 
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
 import java.util.concurrent.ConcurrentHashMap;
@@ -30,6 +29,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+
 
 /**
  * Class for determining the "size" of a class, an attempt to calculate the
@@ -128,8 +129,8 @@ public class ClassSize {
   /** Overhead for timerange */
   public static final int TIMERANGE;
 
-  /** Overhead for TimeRangeTracker */
-  public static final int TIMERANGE_TRACKER;
+  /** Overhead for SyncTimeRangeTracker */
+  public static final int SYNC_TIMERANGE_TRACKER;
 
   /** Overhead for CellSkipListSet */
   public static final int CELL_SET;
@@ -325,7 +326,7 @@ public class ClassSize {
 
 TIMERANGE = align(ClassSize.OBJECT + Bytes.SIZEOF_LONG * 2 + 
Bytes.SIZEOF_BOOLEAN);
 
-TIMERANGE_TRACKER = align(ClassSize.OBJECT + 2 * REFERENCE);
+SYNC_TIMERANGE_TRACKER = align(ClassSize.OBJECT + 2 * REFERENCE);
 CELL_SET = align(OBJECT + REFERENCE);
 
 STORE_SERVICES = align(OBJECT + REFERENCE + ATOMIC_LONG);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9f1bfbea/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 5f8c119..42df264 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -399,7 +399,7 @@ public class TestHFileOutputFormat2  {
   assertNotNull(range);
 
   // unmarshall and check values.
-  TimeRangeTracker timeRangeTracker = new TimeRangeTracker();
+  TimeRangeTracker timeRangeTracker = 
TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
   Writables.copyWritable(range, timeRangeTracker);
   LOG.info(timeRangeTracker.getMin() +
   "" + timeRangeTracker.getMax());

http://git-wip-us.apache.org/repos/asf/hbase/blob/9f1bfbea/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
index 1bae267..5c1410f 100644
--- 
a/hbase-server/src/main/java/org/apach

[51/60] [abbrv] hbase git commit: HBASE-18928 Backup delete command shows wrong number of deletes requested (Amit Kabra)

2017-10-05 Thread busbey
HBASE-18928 Backup delete command shows wrong number of deletes requested (Amit 
Kabra)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/56830c30
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/56830c30
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/56830c30

Branch: refs/heads/HBASE-18467
Commit: 56830c30941e1fb6d3196c2d9f0145b8e7e59885
Parents: c205b5b
Author: tedyu 
Authored: Tue Oct 3 14:07:58 2017 -0700
Committer: tedyu 
Committed: Tue Oct 3 14:07:58 2017 -0700

--
 .../java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/56830c30/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
index fb4f095..102d45d 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
@@ -571,7 +571,7 @@ public final class BackupCommands {
   System.arraycopy(args, 1, backupIds, 0, backupIds.length);
   try (BackupAdminImpl admin = new BackupAdminImpl(conn);) {
 int deleted = admin.deleteBackups(backupIds);
-System.out.println("Deleted " + deleted + " backups. Total requested: 
" + args.length);
+System.out.println("Deleted " + deleted + " backups. Total requested: 
" + (args.length -1));
   } catch (IOException e) {
 System.err
 .println("Delete command FAILED. Please run backup repair tool to 
restore backup system integrity");



[24/60] [abbrv] hbase git commit: HBASE-18845 TestReplicationSmallTests fails after HBASE-14004

2017-10-05 Thread busbey
HBASE-18845 TestReplicationSmallTests fails after HBASE-14004


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/239e6872
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/239e6872
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/239e6872

Branch: refs/heads/HBASE-18467
Commit: 239e6872674ff122ecec2d8d6a557b269e6ae54b
Parents: afce850
Author: zhangduo 
Authored: Mon Sep 25 12:07:19 2017 +0800
Committer: zhangduo 
Committed: Fri Sep 29 14:32:26 2017 +0800

--
 .../replication/TestReplicationSmallTests.java  | 115 +--
 .../hbase/replication/TestReplicationBase.java  |   2 +-
 2 files changed, 56 insertions(+), 61 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/239e6872/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index 6105a0d..28bf249 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -26,7 +26,6 @@ import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.NavigableMap;
 import java.util.TreeMap;
@@ -39,13 +38,13 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
@@ -57,10 +56,14 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.client.replication.TableCFs;
 import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
+import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
 import org.apache.hadoop.hbase.replication.regionserver.Replication;
 import org.apache.hadoop.hbase.replication.regionserver.ReplicationSource;
 import 
org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceInterface;
@@ -73,8 +76,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.mapreduce.Job;
 import org.junit.Before;
 import org.junit.Rule;
@@ -162,7 +165,7 @@ public class TestReplicationSmallTests extends 
TestReplicationBase {
 htable1.put(put);
 
 Get get = new Get(row);
-get.setMaxVersions();
+get.readAllVersions();
 for (int i = 0; i < NB_RETRIES; i++) {
   if (i==NB_RETRIES-1) {
 fail("Waited too much time for put replication");
@@ -184,7 +187,7 @@ public class TestReplicationSmallTests extends 
TestReplicationBase {
 htable1.delete(d);
 
 get = new Get(row);
-get.setMaxVersions();
+get.readAllVersions();
 for (int i = 0; i < NB_RETRIES; i++) {
   if (i==NB_RETRIES-1) {
 fail("Waited too much time for put replication");
@@ -327,7 +330,7 @@ public class TestReplicationSmallTests extends 
TestReplicationBase {
   public void testDisableEnable() throws Exception {
 
 // Test disabling replication
-admin.disablePeer(PEER_ID);
+hbaseAdmin.disableReplicationPeer(PEER_ID);
 
 byte[] rowkey = Bytes.toBytes("disable en

[01/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base [Forced Update!]

2017-10-05 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18467 7b1264a91 -> a47592eff (forced update)


http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
index beef02b..6fa455a 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
@@ -34,8 +34,24 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -43,7 +59,6 @@ import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
@@ -57,6 +72,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 
 @Ignore // Until after HBASE-14614 goes in.
 @Category({MiscTests.class, LargeTests.class})
@@ -111,10 +127,10 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck 
{
   @Test(timeout=18)
   public void testFixAssignmentsWhenMETAinTransition() throws Exception {
 MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
-admin.unassign(HRegionInfo.FIRST_META_REGIONINFO.getRegionName(), true);
-assignmentManager.offlineRegion(HRegionInfo.FIRST_META_REGIONINFO);
+admin.unassign(RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName(), 
true);
+assignmentManager.offlineRegion(RegionInfoBuilder.FIRST_META_REGIONINFO);
 new 
MetaTableLocator().deleteMetaLocation(cluster.getMaster().getZooKeeper());
-
assertFalse(regionStates.isRegionOnline(HRegionInfo.FIRST_META_REGIONINFO));
+
assertFalse(regionStates.isRegionOnline(RegionInfoBuilder.FIRST_META_REGIONINFO));
 HBaseFsck hbck = doFsck(conf, true);
 assertErrors(hbck, new HBaseFsck.ErrorReporter.ERROR_CODE[] { 
HBaseFsck.ErrorReporter.ERROR_CODE.UNKNOWN, 
HBaseFsck.ErrorReporter.ERROR_CODE.NO_META_REGION,
 HBaseFsck.ErrorReporter.ERROR_CODE.NULL_META_REGION });
@@ -134,7 +150,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
   assertEquals(ROWKEYS.length, countRows());
 
   // Now let's mess it up, by adding a region with a duplicate startkey
-  HRegionInfo hriDupe =
+  RegionInfo hriDupe =
   createRegion(tbl.getTableDescriptor(), Bytes.toBytes("A"), 
Bytes.toBytes("A2"));
   TEST_UTIL.assignRegion(hriDupe);
 
@@ -172,7 +188,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
   assertEquals(ROWKEYS.length, countRows());
 
   // Now let's mess it up, by adding a region with a duplicate startkey
-  HRegionInfo hriDupe =
+  RegionInfo hriDupe =
   createRegion(tbl.getTableDescriptor(), Bytes.toBytes("A"), 
Bytes.toBytes("B"));
   TEST_UTIL.assignRegion(hriDupe);
 
@@ -221,7 +237,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
   assertEquals(ROWKEYS.length, countRows());
 
   // Mess it up by creating an overlap in the metadata
-  HRegionInfo hriOverlap =
+  RegionInfo hriOverlap =
   createRegion(tbl.getTableDescriptor(), Bytes.toBytes("A2"), 
Bytes.

[05/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index a8b9998..b73c873 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -19,12 +19,9 @@
 package org.apache.hadoop.hbase.master;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-import java.util.Collection;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -38,18 +35,18 @@ import org.apache.hadoop.hbase.ClusterStatus.Option;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.MetaTableAccessor.Visitor;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.MetaTableAccessor.Visitor;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
@@ -107,7 +104,7 @@ public class TestMasterOperationsForRegionReplicas {
   ADMIN.createTable(desc, Bytes.toBytes("A"), Bytes.toBytes("Z"), 
numRegions);
 
   validateNumberOfRowsInMeta(tableName, numRegions, ADMIN.getConnection());
-  List hris = MetaTableAccessor.getTableRegions(
+  List hris = MetaTableAccessor.getTableRegions(
 ADMIN.getConnection(), tableName);
   assert(hris.size() == numRegions * numReplica);
 } finally {
@@ -129,12 +126,12 @@ public class TestMasterOperationsForRegionReplicas {
   TEST_UTIL.waitTableEnabled(tableName);
   validateNumberOfRowsInMeta(tableName, numRegions, ADMIN.getConnection());
 
-  List hris = 
MetaTableAccessor.getTableRegions(ADMIN.getConnection(), tableName);
+  List hris = 
MetaTableAccessor.getTableRegions(ADMIN.getConnection(), tableName);
   assert(hris.size() == numRegions * numReplica);
   // check that the master created expected number of RegionState objects
   for (int i = 0; i < numRegions; i++) {
 for (int j = 0; j < numReplica; j++) {
-  HRegionInfo replica = 
RegionReplicaUtil.getRegionInfoForReplica(hris.get(i), j);
+  RegionInfo replica = 
RegionReplicaUtil.getRegionInfoForReplica(hris.get(i), j);
   RegionState state = 
TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager()
   .getRegionStates().getRegionState(replica);
   assert (state != null);
@@ -145,7 +142,7 @@ public class TestMasterOperationsForRegionReplicas {
   int numRows = 0;
   for (Result result : metaRows) {
 RegionLocations locations = 
MetaTableAccessor.getRegionLocations(result);
-HRegionInfo hri = locations.getRegionLocation().getRegionInfo();
+RegionInfo hri = locations.getRegionLocation().getRegionInfo();
 if (!hri.getTable().equals(tableName)) continue;
 numRows += 1;
 HRegionLocation[] servers = locations.getRegionLocations();
@@ -168,7 +165,7 @@ public class TestMasterOperationsForRegionReplicas {
   TEST_UTIL.getHBaseClusterInterface().waitForActiveAndReadyMaster();
   for (int i = 0; i < numRegions; i++) {
 for (int j = 0; j < numReplica; j++) {
-  HRegionInfo replica = 
RegionReplicaUtil.getRegionInfoForReplica(hris.get(i), j);
+  RegionInfo replica = 
RegionReplicaUtil.getRegionInfoForReplica(hris.get(i), j);
   RegionState state = 
TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager()
   .getRegionStates().getRegionState(replica);
   assert (state != null);
@@ -205,7 +202,7 @@ public class TestMasterOperationsForRegionReplicas {
   ADMIN.enableTable(tableName);
   LOG.info(ADMIN.getTableDescriptor(tableName).toString());
   assert(ADMIN.isTableEnabled(tableName));
-  List regions = TEST_UTI

[06/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
index 1acfcde..6307210 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
@@ -34,14 +34,13 @@ import java.util.Optional;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
+import org.apache.hadoop.hbase.ClusterStatus.Option;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.ClusterStatus.Option;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -227,13 +226,13 @@ public class TestAsyncClusterAdminApi extends 
TestAsyncAdminBase {
 // Check if regions match with the regionLoad from the server
 Collection servers = admin.getRegionServers().get();
 for (ServerName serverName : servers) {
-  List regions = admin.getOnlineRegions(serverName).get();
+  List regions = admin.getOnlineRegions(serverName).get();
   checkRegionsAndRegionLoads(regions, 
admin.getRegionLoads(serverName).get());
 }
 
 // Check if regionLoad matches the table's regions and nothing is missed
 for (TableName table : tables) {
-  List tableRegions = admin.getTableRegions(table).get();
+  List tableRegions = admin.getTableRegions(table).get();
   List regionLoads = Lists.newArrayList();
   for (ServerName serverName : servers) {
 regionLoads.addAll(admin.getRegionLoads(serverName, 
Optional.of(table)).get());
@@ -268,7 +267,7 @@ public class TestAsyncClusterAdminApi extends 
TestAsyncAdminBase {
 }
   }
 
-  private void checkRegionsAndRegionLoads(Collection regions,
+  private void checkRegionsAndRegionLoads(Collection regions,
   Collection regionLoads) {
 
 assertEquals("No of regions and regionloads doesn't match", 
regions.size(), regionLoads.size());
@@ -277,7 +276,7 @@ public class TestAsyncClusterAdminApi extends 
TestAsyncAdminBase {
 for (RegionLoad regionLoad : regionLoads) {
   regionLoadMap.put(regionLoad.getName(), regionLoad);
 }
-for (HRegionInfo info : regions) {
+for (RegionInfo info : regions) {
   assertTrue("Region not in regionLoadMap region:" + 
info.getRegionNameAsString()
   + " regionMap: " + regionLoadMap, 
regionLoadMap.containsKey(info.getRegionName()));
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index 6292b10..9775b86 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -33,7 +33,6 @@ import java.util.stream.Collectors;
 
 import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -45,7 +44,6 @@ import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
 import org.apache.hadoop.hbase.master.assignment.RegionStates;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -58,6 +56,8 @@ import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+
 /**
  * Class to test asynchronous region admin operations.
  */
@@ -69,10 +69,10 @@ public class TestAsyncRegionAdminApi extends 
TestAsyncAdminBase {
   public void testCloseRegion() throws Exception

[34/60] [abbrv] hbase git commit: HBASE-13844 (Addendum) Replace KeyValue#isDelete by CellUtil#isDelete in hbase-shell

2017-10-05 Thread busbey
HBASE-13844 (Addendum) Replace KeyValue#isDelete by CellUtil#isDelete in 
hbase-shell


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dd3d7de0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dd3d7de0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dd3d7de0

Branch: refs/heads/HBASE-18467
Commit: dd3d7de018f15b24311c007e795798582c39
Parents: 4aadc5d
Author: Chia-Ping Tsai 
Authored: Sun Oct 1 17:47:47 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sun Oct 1 17:47:47 2017 +0800

--
 hbase-shell/src/main/ruby/hbase/table.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dd3d7de0/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index 7eaf1b3..33c80ae 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -748,7 +748,7 @@ EOF
 end
   end
 
-  if kv.isDelete
+  if org.apache.hadoop.hbase.CellUtil.isDelete(kv)
 val = "timestamp=#{kv.getTimestamp}, 
type=#{org.apache.hadoop.hbase.KeyValue::Type.codeToType(kv.getType)}"
   else
 val = "timestamp=#{kv.getTimestamp}, value=#{convert(column, kv, 
converter_class, converter)}"



[40/60] [abbrv] hbase git commit: HBASE-18897 Substitute MemStore for Memstore

2017-10-05 Thread busbey
HBASE-18897 Substitute MemStore for Memstore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d35d8376
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d35d8376
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d35d8376

Branch: refs/heads/HBASE-18467
Commit: d35d8376a70a8de63c5d232a46e39657ba739eef
Parents: 869b90c
Author: Chia-Ping Tsai 
Authored: Mon Oct 2 14:53:02 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Oct 2 20:55:06 2017 +0800

--
 .../apache/hadoop/hbase/HTableDescriptor.java   |  26 +++-
 .../org/apache/hadoop/hbase/RegionLoad.java |   2 +-
 .../org/apache/hadoop/hbase/ServerLoad.java |   6 +-
 .../hadoop/hbase/client/MetricsConnection.java  |   2 +-
 .../hadoop/hbase/client/RegionLoadStats.java|   9 ++
 .../hadoop/hbase/client/TableDescriptor.java|   2 +-
 .../hbase/client/TableDescriptorBuilder.java|   8 +-
 .../backoff/ExponentialClientBackoffPolicy.java |   2 +-
 .../hbase/client/backoff/ServerStatistics.java  |   4 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |   2 +-
 .../client/TestClientExponentialBackoff.java|   4 +-
 .../regionserver/MetricsRegionServerSource.java |   2 +-
 .../MetricsRegionServerWrapper.java |   6 +-
 .../regionserver/MetricsRegionWrapper.java  |   2 +-
 .../MetricsTableWrapperAggregate.java   |   2 +-
 .../MetricsRegionServerSourceImpl.java  |   4 +-
 .../regionserver/MetricsRegionSourceImpl.java   |   2 +-
 .../regionserver/MetricsTableSourceImpl.java|   2 +-
 .../TestMetricsRegionSourceImpl.java|   2 +-
 .../TestMetricsTableSourceImpl.java |   2 +-
 .../src/main/protobuf/Client.proto  |   6 +-
 .../src/main/protobuf/ClusterStatus.proto   |   2 +-
 hbase-protocol/src/main/protobuf/Client.proto   |   6 +-
 .../rest/model/StorageClusterStatusModel.java   |   8 +-
 .../protobuf/StorageClusterStatusMessage.proto  |   4 +-
 .../model/TestStorageClusterStatusModel.java|  12 +-
 .../tmpl/master/RegionServerListTmpl.jamon  |   4 +-
 .../tmpl/regionserver/RegionListTmpl.jamon  |   2 +-
 .../tmpl/regionserver/ServerMetricsTmpl.jamon   |   4 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.java |   4 +-
 .../hadoop/hbase/io/hfile/HFileBlock.java   |   4 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java  |  20 +--
 .../hadoop/hbase/io/util/MemorySizeUtil.java|   6 +-
 .../master/balancer/StochasticLoadBalancer.java |   6 +-
 .../org/apache/hadoop/hbase/mob/MobFile.java|   4 +-
 .../hbase/regionserver/AbstractMemStore.java|  14 +-
 .../regionserver/CellArrayImmutableSegment.java |   4 +-
 .../regionserver/CellChunkImmutableSegment.java |   4 +-
 .../hbase/regionserver/CompactingMemStore.java  |  18 +--
 .../hbase/regionserver/CompactionPipeline.java  |  18 +--
 .../regionserver/CompositeImmutableSegment.java |   4 +-
 .../regionserver/DefaultHeapMemoryTuner.java|   4 +-
 .../hbase/regionserver/DefaultMemStore.java |  14 +-
 .../regionserver/FlushLargeStoresPolicy.java|   2 +-
 .../FlushNonSloppyStoresFirstPolicy.java|   2 +-
 .../hbase/regionserver/FlushRequester.java  |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 154 +--
 .../hbase/regionserver/HRegionServer.java   |  14 +-
 .../hadoop/hbase/regionserver/HStore.java   |  22 +--
 .../hadoop/hbase/regionserver/HStoreFile.java   |   2 +-
 .../hbase/regionserver/HeapMemoryManager.java   |  26 ++--
 .../hadoop/hbase/regionserver/MemStore.java |  12 +-
 .../hbase/regionserver/MemStoreFlusher.java |  54 +++
 .../hadoop/hbase/regionserver/MemStoreSize.java | 107 +
 .../hadoop/hbase/regionserver/MemstoreSize.java | 107 -
 .../hbase/regionserver/MetricsRegionServer.java |   2 +-
 .../MetricsRegionServerWrapperImpl.java |  10 +-
 .../regionserver/MetricsRegionWrapperImpl.java  |   2 +-
 .../MetricsTableWrapperAggregateImpl.java   |  12 +-
 .../hbase/regionserver/MutableSegment.java  |   6 +-
 .../hadoop/hbase/regionserver/Region.java   |   2 +-
 .../regionserver/RegionServerAccounting.java|  64 
 .../regionserver/RegionServicesForStores.java   |  16 +-
 .../hadoop/hbase/regionserver/Segment.java  |   8 +-
 .../hbase/regionserver/SegmentFactory.java  |   3 +-
 .../apache/hadoop/hbase/regionserver/Store.java |  10 +-
 .../regionserver/StoreConfigInformation.java|   2 +-
 .../hadoop/hbase/regionserver/StoreFile.java|   2 +-
 .../hadoop/hbase/regionserver/StoreUtils.java   |   4 +-
 .../hbase/regionserver/StripeStoreConfig.java   |   2 +-
 .../compactions/CompactionConfiguration.java|   8 +-
 .../regionserver/compactions/Compactor.java |  12 +-
 .../hbase/regionserver/wal/AbstractFSWAL.java   |   8 +-
 .../hbase/regionserver/wal/FSWALEntry.java  |   2 +-
 .../RegionReplicaReplicationEndpoint.java

[09/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
index 1d31d5a..b21d55a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
@@ -23,21 +23,21 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentMap;
 
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.Abortable;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.locking.EntityLock;
 import org.apache.hadoop.hbase.executor.ExecutorService;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
 import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
 import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
+
 import com.google.protobuf.Service;
 
 /**
@@ -49,7 +49,7 @@ public interface RegionServerServices
 
   /** @return the WAL for a particular region. Pass null for getting the
* default (common) WAL */
-  WAL getWAL(HRegionInfo regionInfo) throws IOException;
+  WAL getWAL(RegionInfo regionInfo) throws IOException;
 
   /** @return the List of WALs that are used by this server
*  Doesn't include the meta WAL
@@ -127,11 +127,11 @@ public interface RegionServerServices
 private final TransitionCode code;
 private final long openSeqNum;
 private final long masterSystemTime;
-private final HRegionInfo[] hris;
+private final RegionInfo[] hris;
 
 @InterfaceAudience.Private
 public RegionStateTransitionContext(TransitionCode code, long openSeqNum, 
long masterSystemTime,
-HRegionInfo... hris) {
+RegionInfo... hris) {
   this.code = code;
   this.openSeqNum = openSeqNum;
   this.masterSystemTime = masterSystemTime;
@@ -146,7 +146,7 @@ public interface RegionServerServices
 public long getMasterSystemTime() {
   return masterSystemTime;
 }
-public HRegionInfo[] getHris() {
+public RegionInfo[] getHris() {
   return hris;
 }
   }
@@ -161,14 +161,14 @@ public interface RegionServerServices
* @deprecated use {@link 
#reportRegionStateTransition(RegionStateTransitionContext)}
*/
   @Deprecated
-  boolean reportRegionStateTransition(TransitionCode code, long openSeqNum, 
HRegionInfo... hris);
+  boolean reportRegionStateTransition(TransitionCode code, long openSeqNum, 
RegionInfo... hris);
 
   /**
* Notify master that a handler requests to change a region state
* @deprecated use {@link 
#reportRegionStateTransition(RegionStateTransitionContext)}
*/
   @Deprecated
-  boolean reportRegionStateTransition(TransitionCode code, HRegionInfo... 
hris);
+  boolean reportRegionStateTransition(TransitionCode code, RegionInfo... hris);
 
   /**
* Returns a reference to the region server's RPC server
@@ -244,7 +244,7 @@ public interface RegionServerServices
   /**
* Master based locks on namespaces/tables/regions.
*/
-  EntityLock regionLock(List regionInfos, String description,
+  EntityLock regionLock(List regionInfos, String description,
   Abortable abort) throws IOException;
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
index 9b4a32a..aea92f8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
@@ -23,9 +23,9 @@ import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.

[10/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
index 03141a3..66f9240 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
@@ -24,15 +24,15 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ThreadPoolExecutor;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -43,12 +43,13 @@ import org.apache.hadoop.hbase.procedure.Procedure;
 import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
 import org.apache.hadoop.hbase.procedure.ProcedureCoordinatorRpcs;
 import org.apache.hadoop.hbase.procedure.ZKProcedureCoordinator;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
 
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
 public class MasterFlushTableProcedureManager extends MasterProcedureManager {
@@ -125,7 +126,7 @@ public class MasterFlushTableProcedureManager extends 
MasterProcedureManager {
 // It is possible that regions may move after we get the region server 
list.
 // Each region server will get its own online regions for the table.
 // We may still miss regions that need to be flushed.
-List> regionsAndLocations;
+List> regionsAndLocations;
 
 if (TableName.META_TABLE_NAME.equals(tableName)) {
   regionsAndLocations = new MetaTableLocator().getMetaRegionsAndLocations(
@@ -136,9 +137,9 @@ public class MasterFlushTableProcedureManager extends 
MasterProcedureManager {
 }
 
 Set regionServers = new HashSet<>(regionsAndLocations.size());
-for (Pair region : regionsAndLocations) {
+for (Pair region : regionsAndLocations) {
   if (region != null && region.getFirst() != null && region.getSecond() != 
null) {
-HRegionInfo hri = region.getFirst();
+RegionInfo hri = region.getFirst();
 if (hri.isOffline() && (hri.isSplit() || hri.isSplitParent())) 
continue;
 regionServers.add(region.getSecond().toString());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
index 89d7e14..a76e9c1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
@@ -26,16 +26,14 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ScheduledChore;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.util.Environmen

[11/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
index 9f2baf4..6155f16 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
@@ -30,11 +30,10 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -46,11 +45,6 @@ import 
org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.CreateHdfsR
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
@@ -59,8 +53,14 @@ import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotState;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 
 @InterfaceAudience.Private
 public class CloneSnapshotProcedure
@@ -70,7 +70,7 @@ public class CloneSnapshotProcedure
   private TableDescriptor tableDescriptor;
   private SnapshotDescription snapshot;
   private boolean restoreAcl;
-  private List newRegions = null;
+  private List newRegions = null;
   private Map > parentsToChildrenPairMap = new 
HashMap<>();
 
   // Monitor
@@ -253,8 +253,8 @@ public class CloneSnapshotProcedure
 .setSnapshot(this.snapshot)
 .setTableSchema(ProtobufUtil.toTableSchema(tableDescriptor));
 if (newRegions != null) {
-  for (HRegionInfo hri: newRegions) {
-cloneSnapshotMsg.addRegionInfo(HRegionInfo.convert(hri));
+  for (RegionInfo hri: newRegions) {
+cloneSnapshotMsg.addRegionInfo(ProtobufUtil.toRegionInfo(hri));
   }
 }
 if (!parentsToChildrenPairMap.isEmpty()) {
@@ -289,7 +289,7 @@ public class CloneSnapshotProcedure
 } else {
   newRegions = new ArrayList<>(cloneSnapshotMsg.getRegionInfoCount());
   for (HBaseProtos.RegionInfo hri: cloneSnapshotMsg.getRegionInfoList()) {
-newRegions.add(HRegionInfo.convert(hri));
+newRegions.add(ProtobufUtil.toRegionInfo(hri));
   }
 }
 if (cloneSnapshotMsg.getParentToChildRegionsPairListCount() > 0) {
@@ -357,8 +357,8 @@ public class CloneSnapshotProcedure
   throws IOException, InterruptedException {
 final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
 if (cpHost != null) {
-  final HRegionInfo[] regions = (newRegions == null) ? null :
-newRegions.toArray(new HRegionInfo[newRegions.size()]);
+  final RegionInfo[] regions = (newRegions == null) ? null :
+newRegions.toArray(new RegionInfo[newRegions.size()]);
   cpHost.postCompletedCreateTableAction(tableDescriptor, regions, 
getUser());
 }
   }
@@ -368,16 +368,16 @@ public class CloneSnapshotProcedure
* @param env MasterProcedureEnv
* @throws IOException
*/
-  private Li

[39/60] [abbrv] hbase git commit: HBASE-18897 Substitute MemStore for Memstore

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/d35d8376/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index d059977..80c0433 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -518,23 +518,23 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 final FlushResultImpl result; // indicating a failure result from prepare
 final TreeMap storeFlushCtxs;
 final TreeMap> committedFiles;
-final TreeMap storeFlushableSize;
+final TreeMap storeFlushableSize;
 final long startTime;
 final long flushOpSeqId;
 final long flushedSeqId;
-final MemstoreSize totalFlushableSize;
+final MemStoreSize totalFlushableSize;
 
 /** Constructs an early exit case */
 PrepareFlushResult(FlushResultImpl result, long flushSeqId) {
-  this(result, null, null, null, Math.max(0, flushSeqId), 0, 0, new 
MemstoreSize());
+  this(result, null, null, null, Math.max(0, flushSeqId), 0, 0, new 
MemStoreSize());
 }
 
 /** Constructs a successful prepare flush result */
 PrepareFlushResult(
   TreeMap storeFlushCtxs,
   TreeMap> committedFiles,
-  TreeMap storeFlushableSize, long startTime, long 
flushSeqId,
-  long flushedSeqId, MemstoreSize totalFlushableSize) {
+  TreeMap storeFlushableSize, long startTime, long 
flushSeqId,
+  long flushedSeqId, MemStoreSize totalFlushableSize) {
   this(null, storeFlushCtxs, committedFiles, storeFlushableSize, startTime,
 flushSeqId, flushedSeqId, totalFlushableSize);
 }
@@ -543,8 +543,8 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 FlushResultImpl result,
   TreeMap storeFlushCtxs,
   TreeMap> committedFiles,
-  TreeMap storeFlushableSize, long startTime, long 
flushSeqId,
-  long flushedSeqId, MemstoreSize totalFlushableSize) {
+  TreeMap storeFlushableSize, long startTime, long 
flushSeqId,
+  long flushedSeqId, MemStoreSize totalFlushableSize) {
   this.result = result;
   this.storeFlushCtxs = storeFlushCtxs;
   this.committedFiles = committedFiles;
@@ -1007,7 +1007,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   Future future = completionService.take();
   HStore store = future.get();
   this.stores.put(store.getColumnFamilyDescriptor().getName(), store);
-  if (store.isSloppyMemstore()) {
+  if (store.isSloppyMemStore()) {
 hasSloppyStores = true;
   }
 
@@ -1017,7 +1017,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   if (maxSeqId == -1 || storeMaxSequenceId > maxSeqId) {
 maxSeqId = storeMaxSequenceId;
   }
-  long maxStoreMemstoreTS = store.getMaxMemstoreTS().orElse(0L);
+  long maxStoreMemstoreTS = store.getMaxMemStoreTS().orElse(0L);
   if (maxStoreMemstoreTS > maxMemstoreTS) {
 maxMemstoreTS = maxStoreMemstoreTS;
   }
@@ -1194,24 +1194,24 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
* store
* @return the size of memstore in this region
*/
-  public long addAndGetMemstoreSize(MemstoreSize memstoreSize) {
+  public long addAndGetMemStoreSize(MemStoreSize memstoreSize) {
 if (this.rsAccounting != null) {
-  rsAccounting.incGlobalMemstoreSize(memstoreSize);
+  rsAccounting.incGlobalMemStoreSize(memstoreSize);
 }
 long size = this.memstoreDataSize.addAndGet(memstoreSize.getDataSize());
-checkNegativeMemstoreDataSize(size, memstoreSize.getDataSize());
+checkNegativeMemStoreDataSize(size, memstoreSize.getDataSize());
 return size;
   }
 
-  public void decrMemstoreSize(MemstoreSize memstoreSize) {
+  public void decrMemStoreSize(MemStoreSize memstoreSize) {
 if (this.rsAccounting != null) {
-  rsAccounting.decGlobalMemstoreSize(memstoreSize);
+  rsAccounting.decGlobalMemStoreSize(memstoreSize);
 }
 long size = this.memstoreDataSize.addAndGet(-memstoreSize.getDataSize());
-checkNegativeMemstoreDataSize(size, -memstoreSize.getDataSize());
+checkNegativeMemStoreDataSize(size, -memstoreSize.getDataSize());
   }
 
-  private void checkNegativeMemstoreDataSize(long memstoreDataSize, long 
delta) {
+  private void checkNegativeMemStoreDataSize(long memstoreDataSize, long 
delta) {
 // This is extremely bad if we make memstoreSize negative. Log as much 
info on the offending
 // caller as possible. (memStoreSize might be a negative value already -- 
freeing memory)
  

[12/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index ca73ff7..a05ad67 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -43,10 +43,10 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.master.LoadBalancer;
 import org.apache.hadoop.hbase.master.MasterServices;
@@ -71,7 +71,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
   protected static final int MIN_SERVER_BALANCE = 2;
   private volatile boolean stopped = false;
 
-  private static final List EMPTY_REGION_LIST = new 
ArrayList<>(0);
+  private static final List EMPTY_REGION_LIST = new ArrayList<>(0);
 
   static final Predicate IDLE_SERVER_PREDICATOR
 = load -> load.getNumberOfRegions() == 0;
@@ -127,7 +127,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 boolean multiServersPerHost = false; // whether or not any host has more 
than one server
 
 ArrayList tables;
-HRegionInfo[] regions;
+RegionInfo[] regions;
 Deque[] regionLoads;
 private RegionLocationFinder regionFinder;
 
@@ -160,7 +160,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 Map hostsToIndex;
 Map racksToIndex;
 Map tablesToIndex;
-Map regionsToIndex;
+Map regionsToIndex;
 float[] localityPerServer;
 
 int numServers;
@@ -170,7 +170,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 int numRegions;
 
 int numMovedRegions = 0; //num moved regions from the initial configuration
-Map> clusterState;
+Map> clusterState;
 
 protected final RackManager rackManager;
 // Maps region -> rackIndex -> locality of region on rack
@@ -179,7 +179,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 private int[][] regionsToMostLocalEntities;
 
 protected Cluster(
-Map> clusterState,
+Map> clusterState,
 Map> loads,
 RegionLocationFinder regionFinder,
 RackManager rackManager) {
@@ -188,8 +188,8 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 
 @SuppressWarnings("unchecked")
 protected Cluster(
-Collection unassignedRegions,
-Map> clusterState,
+Collection unassignedRegions,
+Map> clusterState,
 Map> loads,
 RegionLocationFinder regionFinder,
 RackManager rackManager) {
@@ -247,7 +247,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
   }
 
   // Count how many regions there are.
-  for (Entry> entry : 
clusterState.entrySet()) {
+  for (Entry> entry : 
clusterState.entrySet()) {
 numRegions += entry.getValue().size();
   }
   numRegions += unassignedRegions.size();
@@ -256,7 +256,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
   servers = new ServerName[numServers];
   serversPerHost = new int[numHosts][];
   serversPerRack = new int[numRacks][];
-  regions = new HRegionInfo[numRegions];
+  regions = new RegionInfo[numRegions];
   regionIndexToServerIndex = new int[numRegions];
   initialRegionIndexToServerIndex = new int[numRegions];
   regionIndexToTableIndex = new int[numRegions];
@@ -279,7 +279,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 
   int tableIndex = 0, regionIndex = 0, regionPerServerIndex = 0;
 
-  for (Entry> entry : 
clusterState.entrySet()) {
+  for (Entry> entry : 
clusterState.entrySet()) {
 if (entry.getKey() == null) {
   LOG.warn("SERVERNAME IS NULL, skipping " + entry.getValue());
   continue;
@@ -314,7 +314,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 racks[entry.getValue()] = entry.getKey();
   }
 
-  for (Entry> entry : 
clusterState.entrySet()) {
+  for (Entry> entry : 
clusterState.entrySet()) {
 int serverIndex = serversToIndex.get(entry.getKey().getHostAndPort());
 regionPerServerIndex = 0;
 
@@ -324,14 +324,14 @@ public abstract class BaseLoadBalancer i

[08/60] [abbrv] hbase git commit: HBASE-18839 Apply RegionInfo to code base

2017-10-05 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/a11a35a1/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index b4e5007..e942a02 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.tool;
 import static 
org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT;
 import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
 import java.io.Closeable;
 import java.io.IOException;
 import java.net.InetSocketAddress;
@@ -62,7 +60,6 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -72,13 +69,13 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
@@ -96,11 +93,14 @@ import org.apache.hadoop.hbase.zookeeper.ZKConfig;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.client.ConnectStringParser;
 import org.apache.zookeeper.data.Stat;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+
 /**
 * HBase Canary Tool, that can be used to do
  * "canary monitoring" of a running HBase cluster.
@@ -210,34 +210,34 @@ public final class Canary implements Tool {
 private Map perTableReadLatency = new HashMap<>();
 private LongAdder writeLatency = new LongAdder();
 
-public void publishReadFailure(ServerName serverName, HRegionInfo region, 
Exception e) {
+public void publishReadFailure(ServerName serverName, RegionInfo region, 
Exception e) {
   incReadFailureCount();
   LOG.error(String.format("read from region %s on regionserver %s failed", 
region.getRegionNameAsString(), serverName), e);
 }
 
-public void publishReadFailure(ServerName serverName, HRegionInfo region, 
ColumnFamilyDescriptor column, Exception e) {
+public void publishReadFailure(ServerName serverName, RegionInfo region, 
ColumnFamilyDescriptor column, Exception e) {
   incReadFailureCount();
   LOG.error(String.format("read from region %s on regionserver %s column 
family %s failed",
 region.getRegionNameAsString(), serverName, column.getNameAsString()), 
e);
 }
 
-public void publishReadTiming(ServerName serverName, HRegionInfo region, 
ColumnFamilyDescriptor column, long msTime) {
+public void publishReadTiming(ServerName serverName, RegionInfo region, 
ColumnFamilyDescriptor column, long msTime) {
   LOG.info(String.format("read from region %s on regionserver %s column 
family %s in %dms",
 region.getRegionNameAsString(), serverName, column.getNameAsString(), 
msTime));
 }
 
-public void publishWriteFailure(ServerName serverName, HRegionInfo region, 
Exception e) {
+public void publishWriteFailure(ServerName serverName, RegionInfo region, 
Exception e) {
   incWriteFailureCount();
   LOG.error(String.format("write to region %s on regionserver %s failed", 
region.getRegionNameAsString(), serverName), e);
 }
 
-public void publishWriteFailure(ServerName serverName, HRegionInfo region, 
ColumnFamilyDescriptor column, Exception e) {
+public void publishWriteFailure(ServerName serverName, RegionInfo region, 
ColumnFamilyDescriptor column, Exception e) {
   incWriteFailureCount();
   LOG.error(String.format("write to region %s on regionserver %s column 
family %s failed",
 region.getRegionNameAsString(), serverName, column.getNameAsSt

hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 af9de6ed8 -> 82ff32ea0


HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/82ff32ea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/82ff32ea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/82ff32ea

Branch: refs/heads/branch-1.3
Commit: 82ff32ea0a87fd146dcbeba39baaf25ee1768f79
Parents: af9de6e
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 12:54:22 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/82ff32ea/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index b972bfd..b13967e 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -128,6 +128,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



[5/5] hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/decf01d4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/decf01d4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/decf01d4

Branch: refs/heads/branch-1.1
Commit: decf01d4b85cdbc764522a26446e4d325eaa84d2
Parents: ea67aca
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 12:52:40 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/decf01d4/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index d925965..dd063ee 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -126,6 +126,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



[2/5] hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/719f5465
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/719f5465
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/719f5465

Branch: refs/heads/branch-1
Commit: 719f5465c48de142c5065f51af729e479d573327
Parents: fc783ef
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 10:41:06 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/719f5465/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index b972bfd..b13967e 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -128,6 +128,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



[3/5] hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d0881ee6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d0881ee6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d0881ee6

Branch: refs/heads/branch-1.4
Commit: d0881ee6081eb5c7f401a4231268a1076c190869
Parents: cbbcb2d
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 10:47:27 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d0881ee6/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index b972bfd..b13967e 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -128,6 +128,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



[1/5] hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 fc783ef04 -> 719f5465c
  refs/heads/branch-1.1 ea67acab7 -> decf01d4b
  refs/heads/branch-1.2 91b9c5bb9 -> 66205be06
  refs/heads/branch-1.4 cbbcb2db2 -> d0881ee60
  refs/heads/branch-2 e4d03d000 -> b57c9bf40


HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b57c9bf4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b57c9bf4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b57c9bf4

Branch: refs/heads/branch-2
Commit: b57c9bf400af8e1cbcc13820298c9235aac3a98a
Parents: e4d03d0
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 10:40:28 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b57c9bf4/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index 0c300c6..0678b27 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -141,6 +141,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



[4/5] hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/66205be0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/66205be0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/66205be0

Branch: refs/heads/branch-1.2
Commit: 66205be0624caf48cc33c741e483e61a4e904e47
Parents: 91b9c5b
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 12:52:13 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/66205be0/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index 16b22ee..adc69ac 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -127,6 +127,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



hbase git commit: HBASE-18939 Backport HBASE-16538 to branch-1.3

2017-10-05 Thread ashishsinghi
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 82aee4ba3 -> af9de6ed8


HBASE-18939 Backport HBASE-16538 to branch-1.3

Signed-off-by: Ashish Singhi 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/af9de6ed
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/af9de6ed
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/af9de6ed

Branch: refs/heads/branch-1.3
Commit: af9de6ed8b3b33d0f87103d98b194bd7e9ddb5d5
Parents: 82aee4b
Author: Ashish Singhi 
Authored: Thu Oct 5 21:47:35 2017 +0530
Committer: Ashish Singhi 
Committed: Thu Oct 5 21:47:35 2017 +0530

--
 .../apache/hadoop/hbase/VersionAnnotation.java  | 66 
 .../apache/hadoop/hbase/util/VersionInfo.java   | 32 +++---
 hbase-common/src/saveVersion.sh | 14 +++--
 3 files changed, 18 insertions(+), 94 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/af9de6ed/hbase-common/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java
deleted file mode 100644
index f3137ae..000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import java.lang.annotation.*;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-/**
- * A package attribute that captures the version of hbase that was compiled.
- * Copied down from hadoop.  All is the same except the name of the interface.
- */
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.PACKAGE)
-@InterfaceAudience.Private
-public @interface VersionAnnotation {
-
-  /**
-   * Get the Hadoop version
-   * @return the version string "0.6.3-dev"
-   */
-  String version();
-
-  /**
-   * Get the username that compiled Hadoop.
-   */
-  String user();
-
-  /**
-   * Get the date when Hadoop was compiled.
-   * @return the date in unix 'date' format
-   */
-  String date();
-
-  /**
-   * Get the url for the subversion repository.
-   */
-  String url();
-
-  /**
-   * Get the subversion revision.
-   * @return the revision number as a string (eg. "451451")
-   */
-  String revision();
-
-  /**
-   * Get a checksum of the source files from which HBase was compiled.
-   * @return a string that uniquely identifies the source
-   **/
-  String srcChecksum();
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/af9de6ed/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
index 8061b4d..dc242d0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
@@ -24,39 +24,23 @@ import java.io.PrintWriter;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.VersionAnnotation;
+import org.apache.hadoop.hbase.Version;
 import org.apache.commons.logging.Log;
 
 /**
- * This class finds the package info for hbase and the VersionAnnotation
- * information.  Taken from hadoop.  Only name of annotation is different.
+ * This class finds the Version information for HBase.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class VersionInfo {
   private static final Log LOG = 
LogFactory.getLog(VersionInfo.class.getName());
-  private static Package myPackage;
-  private static VersionAnnotation version;
-
-  static {
-myPackage = VersionAnnotation.class.getPackage();
-version = myPackage.getAnnotation(VersionAnnotation.cl

hbase git commit: HBASE-18940 include project pylint configs in source artifact.

2017-10-05 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master 98d1637bc -> bafbade24


HBASE-18940 include project pylint configs in source artifact.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bafbade2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bafbade2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bafbade2

Branch: refs/heads/master
Commit: bafbade24899b9d221def6997528885032d3cae3
Parents: 98d1637
Author: Sean Busbey 
Authored: Wed Oct 4 23:42:15 2017 -0500
Committer: Sean Busbey 
Committed: Thu Oct 5 10:38:28 2017 -0500

--
 hbase-assembly/src/main/assembly/src.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bafbade2/hbase-assembly/src/main/assembly/src.xml
--
diff --git a/hbase-assembly/src/main/assembly/src.xml 
b/hbase-assembly/src/main/assembly/src.xml
index 3b02b3c..91163bb 100644
--- a/hbase-assembly/src/main/assembly/src.xml
+++ b/hbase-assembly/src/main/assembly/src.xml
@@ -143,6 +143,7 @@
 NOTICE.txt
 CHANGES.txt
 README.txt
+.pylintrc
   
   0644
 



hbase-site git commit: INFRA-10751 Empty commit

2017-10-05 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site b838bdf0b -> 2b91409e9


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/2b91409e
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/2b91409e
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/2b91409e

Branch: refs/heads/asf-site
Commit: 2b91409e9a6ad111e8b3d0c49f334e97333b871e
Parents: b838bdf
Author: jenkins 
Authored: Thu Oct 5 15:13:42 2017 +
Committer: jenkins 
Committed: Thu Oct 5 15:13:42 2017 +

--

--




[12/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
index 9ec6b56..8802652 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServ

[33/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
index 6f58dbe..b93bf76 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
@@ -55,24 +55,26 @@
 047  ExtendedCellBuilder setTimestamp(final 
long timestamp);
 048
 049  @Override
-050  ExtendedCellBuilder setType(final byte 
type);
+050  ExtendedCellBuilder setType(final 
DataType type);
 051
-052  @Override
-053  ExtendedCellBuilder setValue(final 
byte[] value);
+052  ExtendedCellBuilder setType(final byte 
type);
+053
 054  @Override
-055  ExtendedCellBuilder setValue(final 
byte[] value, final int vOffset, final int vLength);
-056
-057  @Override
-058  ExtendedCell build();
-059
-060  @Override
-061  ExtendedCellBuilder clear();
-062
-063  ExtendedCellBuilder setTags(final 
byte[] tags);
-064  ExtendedCellBuilder setTags(final 
byte[] tags, int tagsOffset, int tagsLength);
-065
-066  ExtendedCellBuilder setSequenceId(final 
long seqId);
-067}
+055  ExtendedCellBuilder setValue(final 
byte[] value);
+056  @Override
+057  ExtendedCellBuilder setValue(final 
byte[] value, final int vOffset, final int vLength);
+058
+059  @Override
+060  ExtendedCell build();
+061
+062  @Override
+063  ExtendedCellBuilder clear();
+064
+065  ExtendedCellBuilder setTags(final 
byte[] tags);
+066  ExtendedCellBuilder setTags(final 
byte[] tags, int tagsOffset, int tagsLength);
+067
+068  ExtendedCellBuilder setSequenceId(final 
long seqId);
+069}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.html
index 32e08d7..54a2937 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.html
@@ -40,7 +40,7 @@
 032  protected int qOffset = 0;
 033  protected int qLength = 0;
 034  protected long timestamp = 
HConstants.LATEST_TIMESTAMP;
-035  protected Byte type = null;
+035  protected KeyValue.Type type = null;
 036  protected byte[] value = null;
 037  protected int vOffset = 0;
 038  protected int vLength = 0;
@@ -95,80 +95,97 @@
 087  }
 088
 089  @Override
-090  public ExtendedCellBuilder 
setType(final byte type) {
-091this.type = type;
+090  public ExtendedCellBuilder 
setType(final DataType type) {
+091this.type = toKeyValueType(type);
 092return this;
 093  }
 094
 095  @Override
-096  public ExtendedCellBuilder 
setValue(final byte[] value) {
-097return setValue(value, 0, 
ArrayUtils.length(value));
-098  }
-099
-100  @Override
-101  public ExtendedCellBuilder 
setValue(final byte[] value, int vOffset, int vLength) {
-102this.value = value;
-103this.vOffset = vOffset;
-104this.vLength = vLength;
-105return this;
-106  }
-107
-108  @Override
-109  public ExtendedCellBuilder 
setTags(final byte[] tags) {
-110return setTags(tags, 0, 
ArrayUtils.length(tags));
-111  }
-112
-113  @Override
-114  public ExtendedCellBuilder 
setTags(final byte[] tags, int tagsOffset, int tagsLength) {
-115this.tags = tags;
-116this.tagsOffset = tagsOffset;
-117this.tagsLength = tagsLength;
-118return this;
-119  }
-120
-121  @Override
-122  public ExtendedCellBuilder 
setSequenceId(final long seqId) {
-123this.seqId = seqId;
+096  public ExtendedCellBuilder 
setType(final byte type) {
+097this.type = 
KeyValue.Type.codeToType(type);
+098return this;
+099  }
+100
+101  @Override
+102  public ExtendedCellBuilder 
setValue(final byte[] value) {
+103return setValue(value, 0, 
ArrayUtils.length(value));
+104  }
+105
+106  @Override
+107  public ExtendedCellBuilder 
setValue(final byte[] value, int vOffset, int vLength) {
+108this.value = value;
+109this.vOffset = vOffset;
+110this.vLength = vLength;
+111return this;
+112  }
+113
+114  @Override
+115  public ExtendedCellBuilder 
setTags(final byte[] tags) {
+116return setTags(tags, 0, 
ArrayUtils.length(tags));
+117  }
+118
+119  @Override
+120  public ExtendedCellBuilder 
setTags(final byte[] tags, int tagsOffset, int tagsLength) {
+121this.tags = tags;
+122this.tagsOffset = tagsOffset;
+123this.tagsLength = tagsLength;
 124return this;
 125  }
 126
-127  private void checkBeforeBuild() {
-128if (type == null) {
-129  throw new 
IllegalArgumentException("The type can't be NULL");
-130}
+127  @Override
+128  public E

[47/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
index 3f6a1f7..f220edd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
@@ -208,7 +208,7 @@ extends ObserverContext
-bypass,
 complete,
 createAndPrepare,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass, shouldComplete
+bypass,
 complete,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass,
 shouldComplete
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/coprocessor/Export.PrivilegedWriter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/Export.PrivilegedWriter.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/Export.PrivilegedWriter.html
index 7ef86b2..0ca9e38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/Export.PrivilegedWriter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/Export.PrivilegedWriter.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class Export.PrivilegedWriter
+private static class Export.PrivilegedWriter
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/security/PrivilegedExceptionAction.html?is-external=true";
 title="class or interface in 
java.security">PrivilegedExceptionActionBoolean>, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 
@@ -228,7 +228,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/security/Privi
 
 
 user
-private final User user
+private final User user
 
 
 
@@ -237,7 +237,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/security/Privi
 
 
 out
-private final org.apache.hadoop.io.SequenceFile.Writer out
+private final org.apache.hadoop.io.SequenceFile.Writer out
 
 
 
@@ -246,7 +246,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/security/Privi
 
 
 key
-private http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object key
+private http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object key
 
 
 
@@ -255,7 +255,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/security/Privi
 
 
 value
-private http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object value
+private http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object value
 
 
 
@@ -272,7 +272,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/security/Privi
 
 
 PrivilegedWriter
-PrivilegedWriter(User user,
+PrivilegedWriter(User user,
  org.apache.hadoop.io.SequenceFile.Writer out)
 
 
@@ -290,7 +290,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/security/Privi
 
 
 append
-void append(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object key,
+void append(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object key,
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object value)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
@@ -305,7 +305,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/security/Privi
 
 
 run
-public http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean run()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean run()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Specified by:
@@ -321

[50/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 5c92e4c..922e340 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -289,7 +289,7 @@
 2053
 0
 0
-13753
+13738
 
 Files
 
@@ -3132,7 +3132,7 @@
 org/apache/hadoop/hbase/ipc/RpcServer.java
 0
 0
-11
+10
 
 org/apache/hadoop/hbase/ipc/RpcServerFactory.java
 0
@@ -4157,7 +4157,7 @@
 org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
 0
 0
-18
+17
 
 org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
 0
@@ -5422,7 +5422,7 @@
 org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
 0
 0
-14
+6
 
 org/apache/hadoop/hbase/regionserver/Segment.java
 0
@@ -5942,7 +5942,7 @@
 org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
 0
 0
-1
+2
 
 org/apache/hadoop/hbase/replication/ChainWALEntryFilter.java
 0
@@ -6712,7 +6712,7 @@
 org/apache/hadoop/hbase/security/access/AccessController.java
 0
 0
-37
+36
 
 org/apache/hadoop/hbase/security/access/AuthResult.java
 0
@@ -6782,7 +6782,7 @@
 org/apache/hadoop/hbase/security/token/TokenProvider.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/security/token/TokenUtil.java
 0
@@ -6852,7 +6852,7 @@
 org/apache/hadoop/hbase/security/visibility/VisibilityController.java
 0
 0
-13
+10
 
 org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java
 0
@@ -6907,7 +6907,7 @@
 org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
 0
 0
-12
+9
 
 org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
 0
@@ -6917,7 +6917,7 @@
 org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
 0
 0
-195
+197
 
 org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
 0
@@ -8204,7 +8204,7 @@
 
 
 http://checkstyle.sourceforge.net/config_blocks.html#NeedBraces";>NeedBraces
-1696
+1695
  Error
 
 coding
@@ -8266,7 +8266,7 @@
 ordered: "true"
 sortStaticImportsAlphabetically: "true"
 option: "top"
-1845
+1835
  Error
 
 
@@ -8278,7 +8278,7 @@
 http://checkstyle.sourceforge.net/config_imports.html#UnusedImports";>UnusedImports
 
 processJavadoc: "true"
-109
+104
  Error
 
 indentation
@@ -8319,7 +8319,7 @@
 
 max: "100"
 ignorePattern: "^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated"
-1124
+1125
  Error
 
 
@@ -24368,7 +24368,7 @@
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-88
+87
 
 org/apache/hadoop/hbase/client/ZKAsyncRegistry.java
 
@@ -28778,7 +28778,7 @@
 design
 FinalClass
 Class Response should be declared as final.
-511
+508
 
 org/apache/hadoop/hbase/coprocessor/MasterCoprocessor.java
 
@@ -28987,14 +28987,14 @@
  Error
 imports
 ImportOrder
-Wrong order for 'org.apache.hadoop.hbase.CoprocessorEnvironment' 
import.
-25
+Wrong order for 'com.google.common.annotations.VisibleForTesting' 
import.
+22
 
  Error
 annotation
 MissingDeprecated
 Must include both @java.lang.Deprecated annotation and @deprecated Javadoc 
tag with description.
-122
+119
 
 org/apache/hadoop/hbase/coprocessor/RegionCoprocessor.java
 
@@ -41270,7 +41270,7 @@
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.exceptions.TimeoutIOException' 
import.
-28
+29
 
 org/apache/hadoop/hbase/ipc/CellBlockBuilder.java
 
@@ -41891,25 +41891,25 @@
 imports
 ImportOrder
 Wrong order for 
'org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo' 
import.
-23
+24
 
  Error
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.security.User' import.
-24
+25
 
  Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-36
+37
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-76
+79
 
 org/apache/hadoop/hbase/ipc/RpcClient.java
 
@@ -42118,26 +42118,14 @@
  Error
 imports
 ImportOrder
-Wrong order for 'java.io.IOException' import.
-25
+Wrong order for 
'org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting'
 import.
+73
 
  Error
-imports
-ImportOrder
-Wrong order for 'org.apache.hadoop.hbase.conf.ConfigurationObserver' 
import.
-51
-
- Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-252
-
- Error
-javadoc
-NonEmptyAtclauseDescription
-At-clause should have a non-empty description.
-256
+253
 
  Error
 javadoc
@@ -42146,10 +42134,10 @@
 257
 
  Error
-blocks
-NeedBraces
-'if' construct must use '{}'s.
-367
+javadoc
+NonEmptyAtclauseDescription
+At-clause should have a non-empty description.
+258
 
  Error
 blocks
@@ -42161,7 +42149,7 @@
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-462
+369
 
  Error
 blocks
@@ -4217

[25/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
index 7433b34..ca8df99 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
@@ -49,1147 +49,1149 @@
 041import 
org.apache.hadoop.hbase.MetaTableAccessor;
 042import 
org.apache.hadoop.hbase.Stoppable;
 043import 
org.apache.hadoop.hbase.TableName;
-044import 
org.apache.yetus.audience.InterfaceAudience;
-045import 
org.apache.yetus.audience.InterfaceStability;
-046import 
org.apache.hadoop.hbase.client.TableDescriptor;
-047import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-048import 
org.apache.hadoop.hbase.client.TableState;
-049import 
org.apache.hadoop.hbase.errorhandling.ForeignException;
-050import 
org.apache.hadoop.hbase.executor.ExecutorService;
-051import 
org.apache.hadoop.hbase.ipc.RpcServer;
-052import 
org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-053import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-054import 
org.apache.hadoop.hbase.master.MasterServices;
-055import 
org.apache.hadoop.hbase.master.MetricsMaster;
-056import 
org.apache.hadoop.hbase.master.SnapshotSentinel;
-057import 
org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-058import 
org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner;
-059import 
org.apache.hadoop.hbase.master.procedure.CloneSnapshotProcedure;
-060import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-061import 
org.apache.hadoop.hbase.master.procedure.RestoreSnapshotProcedure;
-062import 
org.apache.hadoop.hbase.procedure.MasterProcedureManager;
-063import 
org.apache.hadoop.hbase.procedure.Procedure;
-064import 
org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
-065import 
org.apache.hadoop.hbase.procedure.ProcedureCoordinatorRpcs;
-066import 
org.apache.hadoop.hbase.procedure.ZKProcedureCoordinator;
-067import 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-071import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-072import 
org.apache.hadoop.hbase.security.User;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type;
-075import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-076import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-077import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-078import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-079import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-080import 
org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException;
-081import 
org.apache.hadoop.hbase.snapshot.SnapshotExistsException;
-082import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-083import 
org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;
-084import 
org.apache.hadoop.hbase.snapshot.TablePartiallyOpenException;
-085import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-086import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-087import 
org.apache.hadoop.hbase.util.FSUtils;
-088import 
org.apache.hadoop.hbase.util.KeyLocker;
-089import 
org.apache.hadoop.hbase.util.NonceKey;
-090import 
org.apache.zookeeper.KeeperException;
-091
-092/**
-093 * This class manages the procedure of 
taking and restoring snapshots. There is only one
-094 * SnapshotManager for the master.
-095 * 

-096 * The class provides methods for monitoring in-progress snapshot actions. -097 *

-098 * Note: Currently there can only be one snapshot being taken at a time over the cluster. This is a -099 * simplification in the current implementation. -100 */ -101@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) -102@InterfaceStability.Unstable -103public class SnapshotManager extends MasterProcedureManager implements Stoppable { -104 private static final Log LOG = LogFactory.getLog(SnapshotManager.class); -105 -106 /** By default, check to see if the snapshot is complete every WAKE MILLIS (ms) */ -107 private static final int SNAPSHOT_WAKE_MILLIS_DEFAULT = 500; -108 -109 /** -110 * Wait time before removing a finished sentinel from the in-progress map -111 * -112 * NOTE: This is used as a safety auto cleanup. -113 * The snapshot and restore handlers m


[37/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
index 706a892..9db807a 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Configuration")
-public class AccessController
+public class AccessController
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements MasterCoprocessor, RegionCoprocessor, RegionServerCoprocessor, 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService.Interface,
 MasterObserver, 
RegionObserver, 
RegionServerObserver, EndpointObserver, 
BulkLoadObserver
 Provides basic authorization checks for data access and 
administrative
@@ -373,7 +373,7 @@ implements 
 private User
-getActiveUser(ObserverContext ctx)
+getActiveUser(ObserverContext ctx)
 Returns the active user to which authorization checks 
should be applied.
 
 
@@ -1505,7 +1505,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -1514,7 +1514,7 @@ implements 
 
 AUDITLOG
-private static final org.apache.commons.logging.Log AUDITLOG
+private static final org.apache.commons.logging.Log AUDITLOG
 
 
 
@@ -1523,7 +1523,7 @@ implements 
 
 CHECK_COVERING_PERM
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CHECK_COVERING_PERM
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CHECK_COVERING_PERM
 
 See Also:
 Constant
 Field Values
@@ -1536,7 +1536,7 @@ implements 
 
 TAG_CHECK_PASSED
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TAG_CHECK_PASSED
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TAG_CHECK_PASSED
 
 See Also:
 Constant
 Field Values
@@ -1549,7 +1549,7 @@ implements 
 
 TRUE
-private static final byte[] TRUE
+private static final byte[] TRUE
 
 
 
@@ -1558,7 +1558,7 @@ implements 
 
 authManager
-TableAuthManager 
authManager
+TableAuthManager 
authManager
 
 
 
@@ -1567,7 +1567,7 @@ implements 
 
 aclRegion
-boolean aclRegion
+boolean aclRegion
 flags if we are running on a region of the _acl_ table
 
 
@@ -1577,7 +1577,7 @@ implements 
 
 regionEnv
-private RegionCoprocessorEnvironment regionEnv
+private RegionCoprocessorEnvironment regionEnv
 defined only for Endpoint implementation, so it can have 
way to
access region services
 
@@ -1588,7 +1588,7 @@ implements 
 
 scannerOwners
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString> scannerOwners
+private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString> scannerOwners
 Mapping of scanner instances to the user who created 
them
 
 
@@ -1598,7 +1598,7 @@ implements 
 
 tableAcls
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapList> tableAcls
+private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapList> tableAcls
 
 
 
@@ -1607,7 +1607,7 @@ implements 
 
 userProvider
-private UserProvider userProvider
+private UserProvider userProvider
 Provider for mapping principal names to Users
 
 
@@ -1617,7 +1617,7 @@ implements 
 
 authorizationEnabled
-boolean authorizationEnabled
+boolean authorizationEnabled
 if we are active, usually true, only not true if 
"hbase.security.authorization"
has been set to false in site configur

[41/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/package-use.html
index aec03af..05686ca 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-use.html
@@ -468,185 +468,190 @@ service.
 
 
 
+CellBuilder.DataType
+The valid types for user to build the cell.
+
+
+
 CellBuilderType
 Used by CellBuilderFactory and ExtendedCellBuilderFactory.
 
 
-
+
 CellComparator
 Compare two HBase cells.
 
 
-
+
 CellScannable
 Implementer can return a CellScanner over its Cell 
content.
 
 
-
+
 CellScanner
 An interface for iterating through a sequence of 
cells.
 
 
-
+
 CellUtil.EmptyByteBufferCell 
 
-
+
 CellUtil.EmptyCell 
 
-
+
 CellUtil.FirstOnRowByteBufferCell 
 
-
+
 CellUtil.FirstOnRowCell 
 
-
+
 CellUtil.FirstOnRowColByteBufferCell 
 
-
+
 CellUtil.FirstOnRowColCell 
 
-
+
 CellUtil.LastOnRowByteBufferCell 
 
-
+
 CellUtil.LastOnRowCell 
 
-
+
 CellUtil.TagRewriteByteBufferCell 
 
-
+
 CellUtil.TagRewriteCell
 This can be used when a Cell has to change with 
addition/removal of one or more tags.
 
 
-
+
 ChoreService
 ChoreService is a service that can be used to schedule 
instances of ScheduledChore to run
  periodically while sharing threads.
 
 
-
+
 ClusterId
 The identifier for this cluster.
 
 
-
+
 ClusterStatus
 Status information on the HBase cluster.
 
 
-
+
 ClusterStatus.Builder
 Builder for construct a ClusterStatus.
 
 
-
+
 ClusterStatus.Option
 Kinds of ClusterStatus
 
 
-
+
 CompareOperator
 Generic set of comparison operators.
 
 
-
+
 CompatibilityFactory
 Class that will create many instances of classes provided 
by the hbase-hadoop{1|2}-compat jars.
 
 
-
+
 CompatibilitySingletonFactory.SingletonStorage 
 
-
+
 CompoundConfiguration
 Do a shallow merge of multiple KV configuration pools.
 
 
-
+
 CompoundConfiguration.ImmutableConfigMap 
 
-
+
 CoordinatedStateManager
 Implementations of this interface will keep and return to 
clients
  implementations of classes providing API to execute
  coordinated operations.
 
 
-
+
 Coprocessor
 Base interface for the 4 coprocessors - MasterCoprocessor, 
RegionCoprocessor,
  RegionServerCoprocessor, and WALCoprocessor.
 
 
-
+
 Coprocessor.State
 Lifecycle state of a given coprocessor instance.
 
 
-
+
 CoprocessorEnvironment
 Coprocessor environment state.
 
 
-
+
 DoNotRetryIOException
 Subclass if exception is not meant to be retried: e.g.
 
 
-
+
 ExtendedCell
 Extension to Cell with server side required 
functions.
 
 
-
+
 ExtendedCellBuilder
 For internal purpose.
 
 
-
+
 ExtendedCellBuilderImpl 
 
-
+
 HBaseIOException
 All hbase specific IOExceptions should be subclasses of 
HBaseIOException
 
 
-
+
 HColumnDescriptor
 Deprecated. 
 
 
-
+
 HConstants.OperationStatusCode
 Status codes used for return values of bulk 
operations.
 
 
-
+
 HDFSBlocksDistribution
 Data structure to describe the distribution of HDFS blocks 
among hosts.
 
 
-
+
 HDFSBlocksDistribution.HostAndWeight
 Stores the hostname and weight for that hostname.
 
 
-
+
 HealthChecker
 A utility for executing an external script that checks the 
health of
  the node.
 
 
-
+
 HealthChecker.HealthCheckerExitStatus 
 
-
+
 HealthReport
 The Class HealthReport containing information about health 
of the node.
 
 
-
+
 HRegionInfo
 Deprecated. 
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
@@ -654,13 +659,13 @@ service.
 
 
 
-
+
 HRegionLocation
 Data structure to hold RegionInfo and the address for the 
hosting
  HRegionServer.
 
 
-
+
 HTableDescriptor
 Deprecated. 
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
@@ -668,153 +673,153 @@ service.
 
 
 
-
+
 KeepDeletedCells
 Ways to keep cells marked for delete around.
 
 
-
+
 KeyValue
 An HBase Key/Value.
 
 
-
+
 KeyValue.KeyOnlyKeyValue
 A simple form of KeyValue that creates a keyvalue with only 
the key part of the byte[]
  Mainly used in places where we need to compare two cells.
 
 
-
+
 KeyValue.KVComparator
 Deprecated. 
 : Use CellComparator. Deprecated for hbase 
2.0, remove for hbase 3.0.
 
 
 
-
+
 KeyValue.SamePrefixComparator
 Avoids redundant comparisons for better performance.
 
 
-
+
 KeyValue.Type
 Key type.
 
 
-
+
 MemoryCompactionPolicy
 Enum describing all possible memory compaction 
policies
 
 
-
+
 MetaTableAccessor.CollectingVisitor
 A MetaTableAccessor.Visitor that 
collects content out of passed Result.
 
 
-
+
 MetaTableAccessor.DefaultVisitorBase
 A Visitor that skips offline regions and split parents
 
 
-
+
 MetaTableAccessor.QueryType 
 
-
+
 MetaTableAccessor.Visitor
 Implementations 'visit' a catalog table row.
 
 
-
+
 NamespaceDescriptor
 Namespace POJO class.
 
 
-
+
 NamespaceDescriptor.Builder 
 
-
+
 NotAllMetaRegionsOnlineException
 Thrown when an operation requi

[35/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
index 6386c54..e1702ac 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Configuration")
-public class VisibilityController
+public class VisibilityController
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements MasterCoprocessor, RegionCoprocessor, 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService.Interface,
 MasterObserver, 
RegionObserver
 Coprocessor that has both the MasterObserver and 
RegionObserver implemented that supports in
@@ -619,7 +619,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -628,7 +628,7 @@ implements 
 
 AUDITLOG
-private static final org.apache.commons.logging.Log AUDITLOG
+private static final org.apache.commons.logging.Log AUDITLOG
 
 
 
@@ -637,7 +637,7 @@ implements 
 
 labelsRegion
-private boolean labelsRegion
+private boolean labelsRegion
 
 
 
@@ -646,7 +646,7 @@ implements 
 
 accessControllerAvailable
-private boolean accessControllerAvailable
+private boolean accessControllerAvailable
 
 
 
@@ -655,7 +655,7 @@ implements 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -664,7 +664,7 @@ implements 
 
 initialized
-private volatile boolean initialized
+private volatile boolean initialized
 
 
 
@@ -673,7 +673,7 @@ implements 
 
 checkAuths
-private boolean checkAuths
+private boolean checkAuths
 
 
 
@@ -682,7 +682,7 @@ implements 
 
 scannerOwners
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString> scannerOwners
+private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString> scannerOwners
 Mapping of scanner instances to the user who created 
them
 
 
@@ -692,7 +692,7 @@ implements 
 
 visibilityLabelService
-private VisibilityLabelService visibilityLabelService
+private VisibilityLabelService visibilityLabelService
 
 
 
@@ -701,7 +701,7 @@ implements 
 
 authorizationEnabled
-boolean authorizationEnabled
+boolean authorizationEnabled
 if we are active, usually true, only not true if 
"hbase.security.authorization"
 has been set to false in site configuration
 
@@ -712,7 +712,7 @@ implements 
 
 RESERVED_VIS_TAG_TYPES
-private static http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayListByte> RESERVED_VIS_TAG_TYPES
+private static http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayListByte> RESERVED_VIS_TAG_TYPES
 
 
 
@@ -729,7 +729,7 @@ implements 
 
 VisibilityController
-public VisibilityController()
+public VisibilityController()
 
 
 
@@ -746,7 +746,7 @@ implements 
 
 isAuthorizationSupported
-public static boolean isAuthorizationSupported(org.apache.hadoop.conf.Configuration conf)
+public static boolean isAuthorizationSupported(org.apache.hadoop.conf.Configuration conf)
 
 
 
@@ -755,7 +755,7 @@ implements 
 
 isCellAuthorizationSupported
-public static boolean isCellAuthorizationSupported(org.apache.hadoop.conf.Configuration conf)
+public static boolean isCellAuthorizationSupported(org.apache.hadoop.conf.Configuration conf)
 
 
 
@@ -764,7 +764,7 @@ implements 
 
 start
-public void start(CoprocessorEnvironment env)
+public void start(CoprocessorEnvironment env)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: Coprocessor
 Called by the CoprocessorEnvironment d

[42/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
index f0a1ad0..ea9ce2d 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
@@ -125,7 +125,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value="Configuration")
  @InterfaceStability.Unstable
-public class SnapshotManager
+public class SnapshotManager
 extends MasterProcedureManager
 implements Stoppable
 This class manages the procedure of taking and restoring 
snapshots. There is only one
@@ -555,7 +555,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -564,7 +564,7 @@ implements 
 
 SNAPSHOT_WAKE_MILLIS_DEFAULT
-private static final int SNAPSHOT_WAKE_MILLIS_DEFAULT
+private static final int SNAPSHOT_WAKE_MILLIS_DEFAULT
 By default, check to see if the snapshot is complete every 
WAKE MILLIS (ms)
 
 See Also:
@@ -578,7 +578,7 @@ implements 
 
 SNAPSHOT_SENTINELS_CLEANUP_TIMEOUT
-private static final int SNAPSHOT_SENTINELS_CLEANUP_TIMEOUT
+private static final int SNAPSHOT_SENTINELS_CLEANUP_TIMEOUT
 Wait time before removing a finished sentinel from the 
in-progress map
 
  NOTE: This is used as a safety auto cleanup.
@@ -600,7 +600,7 @@ implements 
 
 HBASE_SNAPSHOT_ENABLED
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_SNAPSHOT_ENABLED
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_SNAPSHOT_ENABLED
 Enable or disable snapshot support
 
 See Also:
@@ -614,7 +614,7 @@ implements 
 
 SNAPSHOT_WAKE_MILLIS_KEY
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SNAPSHOT_WAKE_MILLIS_KEY
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SNAPSHOT_WAKE_MILLIS_KEY
 Conf key for # of ms elapsed between checks for snapshot 
errors while waiting for
  completion.
 
@@ -629,7 +629,7 @@ implements 
 
 ONLINE_SNAPSHOT_CONTROLLER_DESCRIPTION
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String ONLINE_SNAPSHOT_CONTROLLER_DESCRIPTION
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String ONLINE_SNAPSHOT_CONTROLLER_DESCRIPTION
 Name of the operation to use in the controller
 
 See Also:
@@ -643,7 +643,7 @@ implements 
 
 SNAPSHOT_POOL_THREADS_KEY
-private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SNAPSHOT_POOL_THREADS_KEY
+private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SNAPSHOT_POOL_THREADS_KEY
 Conf key for # of threads used by the SnapshotManager 
thread pool
 
 See Also:
@@ -657,7 +657,7 @@ implements 
 
 SNAPSHOT_POOL_THREADS_DEFAULT
-private static final int SNAPSHOT_POOL_THREADS_DEFAULT
+private static final int SNAPSHOT_POOL_THREADS_DEFAULT
 number of current operations running on the master
 
 See Also:
@@ -671,7 +671,7 @@ implements 
 
 stopped
-private boolean stopped
+private boolean stopped
 
 
 
@@ -680,7 +680,7 @@ implements 
 
 master
-private MasterServices master
+private MasterServices master
 
 
 
@@ -689,7 +689,7 @@ implements 
 
 coordinator
-private ProcedureCoordinator coordinator
+private ProcedureCoordinator coordinator
 
 
 
@@ -698,7 +698,7 @@ implements 
 
 isSnapshotSupported
-private boolean isSnapshotSupported
+private boolean isSnapshotSupported
 
 
 
@@ -707,7 +707,7 @@ implements 
 
 snapshotHandlers
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map snapshotHandlers
+private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map snapshotHandlers
 
 
 
@@ -716,7 +716,7 @@ implements 
 
 restoreTableToProcIdMap
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map

[30/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
index 9b65f75..c084a1b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
@@ -28,780 +28,779 @@
 020
 021import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION;
 022
-023import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.IOException;
-026import java.net.InetAddress;
-027import java.net.InetSocketAddress;
-028import java.nio.ByteBuffer;
-029import 
java.nio.channels.GatheringByteChannel;
-030import 
java.nio.channels.ReadableByteChannel;
-031import 
java.nio.channels.WritableByteChannel;
-032import java.util.ArrayList;
-033import java.util.Collections;
-034import java.util.HashMap;
-035import java.util.List;
-036import java.util.Locale;
-037import java.util.Map;
-038import 
java.util.concurrent.atomic.LongAdder;
-039
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.CallQueueTooBigException;
-044import 
org.apache.hadoop.hbase.CellScanner;
-045import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-046import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import org.apache.hadoop.hbase.Server;
-049import 
org.apache.yetus.audience.InterfaceAudience;
-050import 
org.apache.yetus.audience.InterfaceStability;
-051import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-052import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-053import 
org.apache.hadoop.hbase.io.ByteBufferPool;
-054import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-055import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-056import 
org.apache.hadoop.hbase.nio.ByteBuff;
-057import 
org.apache.hadoop.hbase.nio.MultiByteBuff;
-058import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-059import 
org.apache.hadoop.hbase.regionserver.RSRpcServices;
-060import 
org.apache.hadoop.hbase.security.SaslUtil;
-061import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-062import 
org.apache.hadoop.hbase.security.User;
-063import 
org.apache.hadoop.hbase.security.UserProvider;
-064import 
org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-065import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-066import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-067import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-068import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-069import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-073import 
org.apache.hadoop.hbase.util.Pair;
-074import 
org.apache.hadoop.security.UserGroupInformation;
-075import 
org.apache.hadoop.security.authorize.AuthorizationException;
-076import 
org.apache.hadoop.security.authorize.PolicyProvider;
-077import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-078import 
org.apache.hadoop.security.token.SecretManager;
-079import 
org.apache.hadoop.security.token.TokenIdentifier;
-080import 
org.codehaus.jackson.map.ObjectMapper;
-081
-082/**
-083 * An RPC server that hosts protobuf 
described Services.
-084 *
-085 */
-086@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-087@InterfaceStability.Evolving
-088public abstract class RpcServer 
implements RpcServerInterface,
-089ConfigurationObserver {
-090  // LOG is being used in CallRunner and 
the log level is being changed in tests
-091  public static final Log LOG = 
LogFactory.getLog(RpcServer.class);
-092  protected static final 
CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
-093  = new CallQueueTooBigException();
-094
-095  private final boolean authorize;
-096  protected boolean isSecurityEnabled;
-097
-098  public static final byte 
CURRENT_VERSION = 0;
-099
-100  /**
-101   * Whether we allow a fallback to 
SIMPLE auth for insecure clients when security is enabled.
-102   */
-103  public static final String 
FALLBACK_TO_INSECURE_CLIENT_AUTH =
-104  
"hbase.ipc.server.fallback-to-simple-auth-allow

[40/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index c5d1931..ab49dd7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class HRegion.RegionScannerImpl
+class HRegion.RegionScannerImpl
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements RegionScanner, RpcCallback
 RegionScannerImpl is used to combine scanners from multiple 
Stores (aka column families).
@@ -425,7 +425,7 @@ implements 
 
 storeHeap
-KeyValueHeap storeHeap
+KeyValueHeap storeHeap
 
 
 
@@ -434,7 +434,7 @@ implements 
 
 joinedHeap
-KeyValueHeap joinedHeap
+KeyValueHeap joinedHeap
 Heap of key-values that are not essential for the provided 
filters and are thus read
  on demand, if on-demand column family loading is enabled.
 
@@ -445,7 +445,7 @@ implements 
 
 joinedContinuationRow
-protected Cell joinedContinuationRow
+protected Cell joinedContinuationRow
 If the joined heap data gathering is interrupted due to 
scan limits, this will
  contain the row for which we are populating the values.
 
@@ -456,7 +456,7 @@ implements 
 
 filterClosed
-private boolean filterClosed
+private boolean filterClosed
 
 
 
@@ -465,7 +465,7 @@ implements 
 
 stopRow
-protected final byte[] stopRow
+protected final byte[] stopRow
 
 
 
@@ -474,7 +474,7 @@ implements 
 
 includeStopRow
-protected final boolean includeStopRow
+protected final boolean includeStopRow
 
 
 
@@ -483,7 +483,7 @@ implements 
 
 region
-protected final HRegion region
+protected final HRegion region
 
 
 
@@ -492,7 +492,7 @@ implements 
 
 comparator
-protected final CellComparator comparator
+protected final CellComparator comparator
 
 
 
@@ -501,7 +501,7 @@ implements 
 
 readPt
-private final long readPt
+private final long readPt
 
 
 
@@ -510,7 +510,7 @@ implements 
 
 maxResultSize
-private final long maxResultSize
+private final long maxResultSize
 
 
 
@@ -519,7 +519,7 @@ implements 
 
 defaultScannerContext
-private final ScannerContext defaultScannerContext
+private final ScannerContext defaultScannerContext
 
 
 
@@ -528,7 +528,7 @@ implements 
 
 filter
-private final FilterWrapper filter
+private final FilterWrapper filter
 
 
 
@@ -545,7 +545,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List additionalScanners,
   HRegion region)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
@@ -561,7 +561,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List additionalScanners,
   HRegion region,
   long nonceGroup,
@@ -587,7 +587,7 @@ implements 
 
 getRegionInfo
-public RegionInfo getRegionInfo()
+public RegionInfo getRegionInfo()
 
 Specified by:
 getRegionInfo in
 interface RegionScanner
@@ -602,7 +602,7 @@ implements 
 
 initializeScanners
-protected void initializeScanners(Scan scan,
+protected void initializeScanners(Scan scan,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List additionalScanners)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
@@ -617,7 +617,7 @@ implements 
 
 initializeKVHeap
-protected void initializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+protected void initializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List joinedScanners,
 HRegion region)
  throws http://docs.oracle

[21/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatch.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatch.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatch.html
index 4a7f4ae..41e0c24 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatch.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatch.html
@@ -143,7970 +143,7971 @@
 135import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
 136import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 137import 
org.apache.hadoop.hbase.ipc.RpcCall;
-138import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-143import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
-145import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
-146import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-147import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
-148import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-149import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-150import 
org.apache.hadoop.hbase.regionserver.wal.WALUtil;
-151import 
org.apache.hadoop.hbase.security.User;
-152import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-153import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-154import 
org.apache.hadoop.hbase.util.Bytes;
-155import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-156import 
org.apache.hadoop.hbase.util.ClassSize;
-157import 
org.apache.hadoop.hbase.util.CollectionUtils;
-158import 
org.apache.hadoop.hbase.util.CompressionTest;
-159import 
org.apache.hadoop.hbase.util.EncryptionTest;
-160import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-161import 
org.apache.hadoop.hbase.util.FSUtils;
-162import 
org.apache.hadoop.hbase.util.HashedBytes;
-163import 
org.apache.hadoop.hbase.util.Pair;
-164import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-165import 
org.apache.hadoop.hbase.util.Threads;
-166import org.apache.hadoop.hbase.wal.WAL;
-167import 
org.apache.hadoop.hbase.wal.WALEdit;
-168import 
org.apache.hadoop.hbase.wal.WALFactory;
-169import 
org.apache.hadoop.hbase.wal.WALKey;
-170import 
org.apache.hadoop.hbase.wal.WALSplitter;
-171import 
org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
-172import 
org.apache.hadoop.io.MultipleIOException;
-173import 
org.apache.hadoop.util.StringUtils;
-174import org.apache.htrace.Trace;
-175import org.apache.htrace.TraceScope;
-176import 
org.apache.yetus.audience.InterfaceAudience;
-177
-178import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-179import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-180import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-181import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-182import 
org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
-183import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-184import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-185import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-186import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-187import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-188import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
-189import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
-190import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-191import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-192import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-193import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
-194import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
-195import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
-196import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
-197import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
-198import 
org.apache.hadoop.hba

[08/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
index 4260431..23ff345 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -26,399 +26,399 @@
 018 */
 019package 
org.apache.hadoop.hbase.security.access;
 020
-021import java.io.IOException;
-022import java.net.InetAddress;
-023import 
java.security.PrivilegedExceptionAction;
-024import java.util.ArrayList;
-025import java.util.Collection;
-026import java.util.Collections;
-027import java.util.HashMap;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.Optional;
-033import java.util.Set;
-034import java.util.TreeMap;
-035import java.util.TreeSet;
-036
-037import com.google.protobuf.Message;
-038import com.google.protobuf.RpcCallback;
-039import 
com.google.protobuf.RpcController;
-040import com.google.protobuf.Service;
-041import org.apache.commons.logging.Log;
-042import 
org.apache.commons.logging.LogFactory;
-043import 
org.apache.hadoop.conf.Configuration;
-044import 
org.apache.hadoop.hbase.ArrayBackedTag;
-045import org.apache.hadoop.hbase.Cell;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.CompareOperator;
-049import 
org.apache.hadoop.hbase.CompoundConfiguration;
-050import 
org.apache.hadoop.hbase.CoprocessorEnvironment;
-051import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-052import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-053import 
org.apache.hadoop.hbase.HConstants;
-054import 
org.apache.hadoop.hbase.KeyValue;
-055import 
org.apache.hadoop.hbase.KeyValue.Type;
-056import 
org.apache.hadoop.hbase.MetaTableAccessor;
-057import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.TableName;
-060import org.apache.hadoop.hbase.Tag;
-061import 
org.apache.hadoop.hbase.client.Append;
-062import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-063import 
org.apache.hadoop.hbase.client.Delete;
-064import 
org.apache.hadoop.hbase.client.Durability;
-065import 
org.apache.hadoop.hbase.client.Get;
-066import 
org.apache.hadoop.hbase.client.Increment;
-067import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-068import 
org.apache.hadoop.hbase.client.Mutation;
-069import 
org.apache.hadoop.hbase.client.Put;
-070import 
org.apache.hadoop.hbase.client.Query;
-071import 
org.apache.hadoop.hbase.client.RegionInfo;
-072import 
org.apache.hadoop.hbase.client.Result;
-073import 
org.apache.hadoop.hbase.client.Scan;
-074import 
org.apache.hadoop.hbase.client.SnapshotDescription;
-075import 
org.apache.hadoop.hbase.client.Table;
-076import 
org.apache.hadoop.hbase.client.TableDescriptor;
-077import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-078import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-079import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-080import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
-081import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
-082import 
org.apache.hadoop.hbase.coprocessor.MasterObserver;
-083import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-084import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-085import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-086import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-087import 
org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
-088import 
org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
-089import 
org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
-090import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-091import 
org.apache.hadoop.hbase.filter.Filter;
-092import 
org.apache.hadoop.hbase.filter.FilterList;
-093import 
org.apache.hadoop.hbase.io.hfile.HFile;
-094import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-095import 
org.apache.hadoop.hbase.ipc.RpcServer;
-096import 
org.apache.hadoop.hbase.master.MasterServices;
-097import 
org.apache.hadoop.hbase.master.locking.LockProcedure;
-098import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-099import 
org.apache.hadoop.hbase.net.Address;
-100import 
org.apache.hadoop.hbase.procedure2.LockType;
-101import 
org.apache.hadoop.hbase.procedure2.Procedure;
-102import 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-103import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-104import 
org.apache.hadoop.hb

[28/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
index 9b65f75..c084a1b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
@@ -28,780 +28,779 @@
 020
 021import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION;
 022
-023import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.IOException;
-026import java.net.InetAddress;
-027import java.net.InetSocketAddress;
-028import java.nio.ByteBuffer;
-029import 
java.nio.channels.GatheringByteChannel;
-030import 
java.nio.channels.ReadableByteChannel;
-031import 
java.nio.channels.WritableByteChannel;
-032import java.util.ArrayList;
-033import java.util.Collections;
-034import java.util.HashMap;
-035import java.util.List;
-036import java.util.Locale;
-037import java.util.Map;
-038import 
java.util.concurrent.atomic.LongAdder;
-039
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.CallQueueTooBigException;
-044import 
org.apache.hadoop.hbase.CellScanner;
-045import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-046import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import org.apache.hadoop.hbase.Server;
-049import 
org.apache.yetus.audience.InterfaceAudience;
-050import 
org.apache.yetus.audience.InterfaceStability;
-051import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-052import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-053import 
org.apache.hadoop.hbase.io.ByteBufferPool;
-054import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-055import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-056import 
org.apache.hadoop.hbase.nio.ByteBuff;
-057import 
org.apache.hadoop.hbase.nio.MultiByteBuff;
-058import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-059import 
org.apache.hadoop.hbase.regionserver.RSRpcServices;
-060import 
org.apache.hadoop.hbase.security.SaslUtil;
-061import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-062import 
org.apache.hadoop.hbase.security.User;
-063import 
org.apache.hadoop.hbase.security.UserProvider;
-064import 
org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-065import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-066import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-067import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-068import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-069import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-073import 
org.apache.hadoop.hbase.util.Pair;
-074import 
org.apache.hadoop.security.UserGroupInformation;
-075import 
org.apache.hadoop.security.authorize.AuthorizationException;
-076import 
org.apache.hadoop.security.authorize.PolicyProvider;
-077import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-078import 
org.apache.hadoop.security.token.SecretManager;
-079import 
org.apache.hadoop.security.token.TokenIdentifier;
-080import 
org.codehaus.jackson.map.ObjectMapper;
-081
-082/**
-083 * An RPC server that hosts protobuf 
described Services.
-084 *
-085 */
-086@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-087@InterfaceStability.Evolving
-088public abstract class RpcServer 
implements RpcServerInterface,
-089ConfigurationObserver {
-090  // LOG is being used in CallRunner and 
the log level is being changed in tests
-091  public static final Log LOG = 
LogFactory.getLog(RpcServer.class);
-092  protected static final 
CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
-093  = new CallQueueTooBigException();
-094
-095  private final boolean authorize;
-096  protected boolean isSecurityEnabled;
-097
-098  public static final byte 
CURRENT_VERSION = 0;
-099
-100  /**
-101   * Whether we allow a fallback to 
SIMPLE auth for insecure clients when security is enabled.
-102   */
-103  public static final String 
FALLBACK_TO_INSECURE_CLIENT_AUTH =
-104  
"hbase.ipc.server.fallback-to-simple-auth-allowed";
-105
-106  /**
-107   * How many calls/handler are allowed 
in the queue.
-108   */
-109  protected static final int 
DEFAULT_MAX_CALLQUEUE

[20/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
index 4a7f4ae..41e0c24 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
@@ -143,7970 +143,7971 @@
 135import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
 136import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 137import 
org.apache.hadoop.hbase.ipc.RpcCall;
-138import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-143import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
-145import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
-146import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-147import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
-148import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-149import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-150import 
org.apache.hadoop.hbase.regionserver.wal.WALUtil;
-151import 
org.apache.hadoop.hbase.security.User;
-152import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-153import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-154import 
org.apache.hadoop.hbase.util.Bytes;
-155import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-156import 
org.apache.hadoop.hbase.util.ClassSize;
-157import 
org.apache.hadoop.hbase.util.CollectionUtils;
-158import 
org.apache.hadoop.hbase.util.CompressionTest;
-159import 
org.apache.hadoop.hbase.util.EncryptionTest;
-160import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-161import 
org.apache.hadoop.hbase.util.FSUtils;
-162import 
org.apache.hadoop.hbase.util.HashedBytes;
-163import 
org.apache.hadoop.hbase.util.Pair;
-164import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-165import 
org.apache.hadoop.hbase.util.Threads;
-166import org.apache.hadoop.hbase.wal.WAL;
-167import 
org.apache.hadoop.hbase.wal.WALEdit;
-168import 
org.apache.hadoop.hbase.wal.WALFactory;
-169import 
org.apache.hadoop.hbase.wal.WALKey;
-170import 
org.apache.hadoop.hbase.wal.WALSplitter;
-171import 
org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
-172import 
org.apache.hadoop.io.MultipleIOException;
-173import 
org.apache.hadoop.util.StringUtils;
-174import org.apache.htrace.Trace;
-175import org.apache.htrace.TraceScope;
-176import 
org.apache.yetus.audience.InterfaceAudience;
-177
-178import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-179import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-180import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-181import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-182import 
org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
-183import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-184import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-185import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-186import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-187import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-188import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
-189import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
-190import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-191import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-192import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-193import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
-194import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
-195import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
-196import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
-197import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALPro

[45/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
index 38a3736..412620b 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value={"Coprocesssor","Phoenix"})
  @InterfaceStability.Evolving
-public abstract class RpcServer
+public abstract class RpcServer
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements RpcServerInterface, ConfigurationObserver
 An RPC server that hosts protobuf described Services.
@@ -493,7 +493,7 @@ implements getConf() 
 
 
-static RpcCall
+static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
 getCurrentCall()
 Needed for features such as delayed calls.
 
@@ -519,7 +519,7 @@ implements 
-static http://docs.oracle.com/javase/8/docs/api/java/net/InetAddress.html?is-external=true";
 title="class or interface in java.net">InetAddress
+static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">OptionalInetAddress>
 getRemoteAddress() 
 
 
@@ -530,17 +530,17 @@ implements 
-static User
+static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
 getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
+Returns the user credentials associated with the current 
RPC request or not present if no
+ credentials were provided.
 
 
 
-static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">OptionalString>
 getRequestUserName()
 Returns the username for any user associated with the 
current RPC
- request or null if no user is set.
+ request or not present if no user is set.
 
 
 
@@ -659,7 +659,7 @@ implements 
 
 LOG
-public static final org.apache.commons.logging.Log LOG
+public static final org.apache.commons.logging.Log LOG
 
 
 
@@ -668,7 +668,7 @@ implements 
 
 CALL_QUEUE_TOO_BIG_EXCEPTION
-protected static final CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
+protected static final CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
 
 
 
@@ -677,7 +677,7 @@ implements 
 
 authorize
-private final boolean authorize
+private final boolean authorize
 
 
 
@@ -686,7 +686,7 @@ implements 
 
 isSecurityEnabled
-protected boolean isSecurityEnabled
+protected boolean isSecurityEnabled
 
 
 
@@ -695,7 +695,7 @@ implements 
 
 CURRENT_VERSION
-public static final byte CURRENT_VERSION
+public static final byte CURRENT_VERSION
 
 See Also:
 Constant
 Field Values
@@ -708,7 +708,7 @@ implements 
 
 FALLBACK_TO_INSECURE_CLIENT_AUTH
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String FALLBACK_TO_INSECURE_CLIENT_AUTH
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String FALLBACK_TO_INSECURE_CLIENT_AUTH
 Whether we allow a fallback to SIMPLE auth for insecure 
clients when security is enabled.
 
 See Also:
@@ -722,7 +722,7 @@ implements 
 
 DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER
-protected static final int DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER
+protected static final int DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER
 How many calls/handler are allowed in the queue.
 
 See Also:
@@ -736,7 +736,7 @@ implements 
 
 cellBlockBuilder
-protected final CellBlockBuilder cellBlockBuilder
+protected final CellBlockBuilder cellBlockBuilder
 
 
 
@@ -745,7 +745,7 @@ implements 
 
 AUTH_FAILED_FOR
-protected static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String AUTH_FAILED_FOR
+protected static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String AUTH_FAILED_FOR
 
 See Also:
 Constant
 Field Values
@@ -758,7 +758,7 @@ implements 
 
 AUTH_SUCCESS

[24/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.html
index 074c798..41e60b2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.html
@@ -184,28 +184,22 @@
 176  private OperationQuota checkQuota(final 
Region region,
 177  final int numWrites, final int 
numReads, final int numScans)
 178  throws IOException, 
ThrottlingException {
-179User user = 
RpcServer.getRequestUser();
-180UserGroupInformation ugi;
-181if (user != null) {
-182  ugi = user.getUGI();
-183} else {
-184  ugi = User.getCurrent().getUGI();
-185}
-186TableName table = 
region.getTableDescriptor().getTableName();
-187
-188OperationQuota quota = getQuota(ugi, 
table);
-189try {
-190  quota.checkQuota(numWrites, 
numReads, numScans);
-191} catch (ThrottlingException e) {
-192  LOG.debug("Throttling exception for 
user=" + ugi.getUserName() +
-193" table=" + table + " 
numWrites=" + numWrites +
-194" numReads=" + numReads + 
" numScans=" + numScans +
-195": " + e.getMessage());
-196  throw e;
-197}
-198return quota;
-199  }
-200}
+179UserGroupInformation ugi = 
RpcServer.getRequestUser().orElse(User.getCurrent()).getUGI();
+180TableName table = 
region.getTableDescriptor().getTableName();
+181
+182OperationQuota quota = getQuota(ugi, 
table);
+183try {
+184  quota.checkQuota(numWrites, 
numReads, numScans);
+185} catch (ThrottlingException e) {
+186  LOG.debug("Throttling exception for 
user=" + ugi.getUserName() +
+187" table=" + table + " 
numWrites=" + numWrites +
+188" numReads=" + numReads + 
" numScans=" + numScans +
+189": " + e.getMessage());
+190  throw e;
+191}
+192return quota;
+193  }
+194}
 
 
 



[31/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ObserverContext.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ObserverContext.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ObserverContext.html
index 1cbe7a9..11d5f8b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ObserverContext.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ObserverContext.html
@@ -7,33 +7,33 @@
 
 
 001/*
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package 
org.apache.hadoop.hbase.coprocessor;
 019
-020package 
org.apache.hadoop.hbase.coprocessor;
+020import java.util.Optional;
 021
-022import 
edu.umd.cs.findbugs.annotations.Nullable;
-023import 
org.apache.yetus.audience.InterfaceAudience;
-024import 
org.apache.yetus.audience.InterfaceStability;
-025import 
org.apache.hadoop.hbase.CoprocessorEnvironment;
-026import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-027import 
org.apache.hadoop.hbase.ipc.RpcServer;
-028import 
org.apache.hadoop.hbase.security.User;
+022import 
com.google.common.annotations.VisibleForTesting;
+023import 
org.apache.hadoop.hbase.CoprocessorEnvironment;
+024import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
+025import 
org.apache.hadoop.hbase.ipc.RpcServer;
+026import 
org.apache.hadoop.hbase.security.User;
+027import 
org.apache.yetus.audience.InterfaceAudience;
+028import 
org.apache.yetus.audience.InterfaceStability;
 029
 030/**
 031 * Carries the execution state for a 
given invocation of an Observer coprocessor
@@ -51,115 +51,89 @@
 043  private E env;
 044  private boolean bypass;
 045  private boolean complete;
-046  private User caller;
+046  private final User caller;
 047
-048  public ObserverContext(User caller) {
-049this.caller = caller;
-050  }
-051
-052  public E getEnvironment() {
-053return env;
-054  }
-055
-056  public void prepare(E env) {
-057this.env = env;
-058  }
-059
-060  /**
-061   * Call to indicate that the current 
coprocessor's return value should be
-062   * used in place of the normal HBase 
obtained value.
-063   */
-064  public void bypass() {
-065bypass = true;
-066  }
-067
-068  /**
-069   * Call to indicate that additional 
coprocessors further down the execution
-070   * chain do not need to be invoked.  
Implies that this coprocessor's response
-071   * is definitive.
-072   */
-073  public void complete() {
-074complete = true;
-075  }
-076
-077  /**
-078   * For use by the coprocessor 
framework.
-079   * @return 
true if {@link ObserverContext#bypass()}
-080   * was called by one of the loaded 
coprocessors, false otherwise.
-081   */
-082  public boolean shouldBypass() {
-083boolean current = bypass;
-084bypass = false;
-085return current;
-086  }
-087
-088  /**
-089   * For use by the coprocessor 
framework.
-090   * @return 
true if {@link ObserverContext#complete()}
-091   * was called by one of the loaded 
coprocessors, false otherwise.
-092   */
-093  public boolean shouldComplete() {
-094boolean current = complete;
-095complete = fal

[18/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index 4a7f4ae..41e0c24 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -143,7970 +143,7971 @@
 135import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
 136import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 137import 
org.apache.hadoop.hbase.ipc.RpcCall;
-138import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-143import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
-145import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
-146import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-147import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
-148import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-149import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-150import 
org.apache.hadoop.hbase.regionserver.wal.WALUtil;
-151import 
org.apache.hadoop.hbase.security.User;
-152import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-153import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-154import 
org.apache.hadoop.hbase.util.Bytes;
-155import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-156import 
org.apache.hadoop.hbase.util.ClassSize;
-157import 
org.apache.hadoop.hbase.util.CollectionUtils;
-158import 
org.apache.hadoop.hbase.util.CompressionTest;
-159import 
org.apache.hadoop.hbase.util.EncryptionTest;
-160import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-161import 
org.apache.hadoop.hbase.util.FSUtils;
-162import 
org.apache.hadoop.hbase.util.HashedBytes;
-163import 
org.apache.hadoop.hbase.util.Pair;
-164import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-165import 
org.apache.hadoop.hbase.util.Threads;
-166import org.apache.hadoop.hbase.wal.WAL;
-167import 
org.apache.hadoop.hbase.wal.WALEdit;
-168import 
org.apache.hadoop.hbase.wal.WALFactory;
-169import 
org.apache.hadoop.hbase.wal.WALKey;
-170import 
org.apache.hadoop.hbase.wal.WALSplitter;
-171import 
org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
-172import 
org.apache.hadoop.io.MultipleIOException;
-173import 
org.apache.hadoop.util.StringUtils;
-174import org.apache.htrace.Trace;
-175import org.apache.htrace.TraceScope;
-176import 
org.apache.yetus.audience.InterfaceAudience;
-177
-178import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-179import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-180import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-181import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-182import 
org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
-183import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-184import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-185import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-186import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-187import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-188import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
-189import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
-190import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-191import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-192import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-193import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
-194import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
-195import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
-196import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
-197import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
-198import 
o

[07/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/security/token/TokenProvider.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/token/TokenProvider.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/token/TokenProvider.html
index 506695f..7f0026f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/token/TokenProvider.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/token/TokenProvider.html
@@ -23,144 +23,137 @@
 015 * See the License for the specific 
language governing permissions and
 016 * limitations under the License.
 017 */
-018
-019package 
org.apache.hadoop.hbase.security.token;
-020
-021import java.io.IOException;
-022import java.util.Collections;
+018package 
org.apache.hadoop.hbase.security.token;
+019
+020import com.google.protobuf.RpcCallback;
+021import 
com.google.protobuf.RpcController;
+022import com.google.protobuf.Service;
 023
-024import org.apache.commons.logging.Log;
-025import 
org.apache.commons.logging.LogFactory;
-026import 
org.apache.hadoop.hbase.CoprocessorEnvironment;
-027import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-028import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-029import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-030import 
org.apache.hadoop.hbase.ipc.RpcServer;
-031import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-032import 
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
-033import 
org.apache.hadoop.hbase.regionserver.RegionServerServices;
-034import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-035import 
org.apache.hadoop.hbase.security.User;
-036import 
org.apache.hadoop.security.UserGroupInformation;
-037import 
org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-038import 
org.apache.hadoop.security.token.SecretManager;
-039import 
org.apache.hadoop.security.token.Token;
-040
-041import com.google.protobuf.RpcCallback;
-042import 
com.google.protobuf.RpcController;
-043import com.google.protobuf.Service;
-044import 
org.apache.yetus.audience.InterfaceAudience;
-045
-046/**
-047 * Provides a service for obtaining 
authentication tokens via the
-048 * {@link AuthenticationProtos} 
AuthenticationService coprocessor service.
-049 */
-050@InterfaceAudience.Private
-051public class TokenProvider implements 
AuthenticationProtos.AuthenticationService.Interface,
-052RegionCoprocessor {
-053
-054  private static final Log LOG = 
LogFactory.getLog(TokenProvider.class);
-055
-056  private 
AuthenticationTokenSecretManager secretManager;
+024import java.io.IOException;
+025import java.util.Collections;
+026
+027import org.apache.commons.logging.Log;
+028import 
org.apache.commons.logging.LogFactory;
+029import 
org.apache.hadoop.hbase.CoprocessorEnvironment;
+030import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
+031import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+032import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+033import 
org.apache.hadoop.hbase.ipc.RpcServer;
+034import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
+035import 
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
+036import 
org.apache.hadoop.hbase.regionserver.RegionServerServices;
+037import 
org.apache.hadoop.hbase.security.AccessDeniedException;
+038import 
org.apache.hadoop.hbase.security.User;
+039import 
org.apache.hadoop.security.UserGroupInformation;
+040import 
org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+041import 
org.apache.hadoop.security.token.SecretManager;
+042import 
org.apache.hadoop.security.token.Token;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045/**
+046 * Provides a service for obtaining 
authentication tokens via the
+047 * {@link AuthenticationProtos} 
AuthenticationService coprocessor service.
+048 */
+049@InterfaceAudience.Private
+050public class TokenProvider implements 
AuthenticationProtos.AuthenticationService.Interface,
+051RegionCoprocessor {
+052
+053  private static final Log LOG = 
LogFactory.getLog(TokenProvider.class);
+054
+055  private 
AuthenticationTokenSecretManager secretManager;
+056
 057
-058
-059  @Override
-060  public void 
start(CoprocessorEnvironment env) {
-061// if running at region
-062if (env instanceof 
RegionCoprocessorEnvironment) {
-063  RegionCoprocessorEnvironment 
regionEnv =
-064  
(RegionCoprocessorEnvironment)env;
-065  assert 
regionEnv.getCoprocessorRegionServerServices() instanceof 
RegionServerServices;
-066  RpcServerInterface server = 
((RegionServerServices) regionEnv
-067  
.getCoprocessorRegionServerServices()).getRpcServer();
-068  SecretManager mgr = 
((RpcServer)server).getSecretManager();
-069  if (mgr instanceof 
AuthenticationTokenSecretManager) {
-070secretM

[15/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
index 4a7f4ae..41e0c24 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
@@ -143,7970 +143,7971 @@
 135import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
 136import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 137import 
org.apache.hadoop.hbase.ipc.RpcCall;
-138import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-143import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
-145import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
-146import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-147import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
-148import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-149import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-150import 
org.apache.hadoop.hbase.regionserver.wal.WALUtil;
-151import 
org.apache.hadoop.hbase.security.User;
-152import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-153import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-154import 
org.apache.hadoop.hbase.util.Bytes;
-155import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-156import 
org.apache.hadoop.hbase.util.ClassSize;
-157import 
org.apache.hadoop.hbase.util.CollectionUtils;
-158import 
org.apache.hadoop.hbase.util.CompressionTest;
-159import 
org.apache.hadoop.hbase.util.EncryptionTest;
-160import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-161import 
org.apache.hadoop.hbase.util.FSUtils;
-162import 
org.apache.hadoop.hbase.util.HashedBytes;
-163import 
org.apache.hadoop.hbase.util.Pair;
-164import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-165import 
org.apache.hadoop.hbase.util.Threads;
-166import org.apache.hadoop.hbase.wal.WAL;
-167import 
org.apache.hadoop.hbase.wal.WALEdit;
-168import 
org.apache.hadoop.hbase.wal.WALFactory;
-169import 
org.apache.hadoop.hbase.wal.WALKey;
-170import 
org.apache.hadoop.hbase.wal.WALSplitter;
-171import 
org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
-172import 
org.apache.hadoop.io.MultipleIOException;
-173import 
org.apache.hadoop.util.StringUtils;
-174import org.apache.htrace.Trace;
-175import org.apache.htrace.TraceScope;
-176import 
org.apache.yetus.audience.InterfaceAudience;
-177
-178import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-179import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-180import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-181import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-182import 
org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
-183import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-184import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-185import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-186import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-187import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-188import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
-189import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
-190import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-191import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-192import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-193import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
-194import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
-195import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
-196import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
-197import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
-198import 
org.apache.hadoop.hbase.shaded.

[13/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
index 4a7f4ae..41e0c24 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -143,7970 +143,7971 @@
 135import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
 136import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 137import 
org.apache.hadoop.hbase.ipc.RpcCall;
-138import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-143import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
-145import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
-146import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-147import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
-148import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-149import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-150import 
org.apache.hadoop.hbase.regionserver.wal.WALUtil;
-151import 
org.apache.hadoop.hbase.security.User;
-152import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-153import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-154import 
org.apache.hadoop.hbase.util.Bytes;
-155import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-156import 
org.apache.hadoop.hbase.util.ClassSize;
-157import 
org.apache.hadoop.hbase.util.CollectionUtils;
-158import 
org.apache.hadoop.hbase.util.CompressionTest;
-159import 
org.apache.hadoop.hbase.util.EncryptionTest;
-160import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-161import 
org.apache.hadoop.hbase.util.FSUtils;
-162import 
org.apache.hadoop.hbase.util.HashedBytes;
-163import 
org.apache.hadoop.hbase.util.Pair;
-164import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-165import 
org.apache.hadoop.hbase.util.Threads;
-166import org.apache.hadoop.hbase.wal.WAL;
-167import 
org.apache.hadoop.hbase.wal.WALEdit;
-168import 
org.apache.hadoop.hbase.wal.WALFactory;
-169import 
org.apache.hadoop.hbase.wal.WALKey;
-170import 
org.apache.hadoop.hbase.wal.WALSplitter;
-171import 
org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
-172import 
org.apache.hadoop.io.MultipleIOException;
-173import 
org.apache.hadoop.util.StringUtils;
-174import org.apache.htrace.Trace;
-175import org.apache.htrace.TraceScope;
-176import 
org.apache.yetus.audience.InterfaceAudience;
-177
-178import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-179import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-180import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-181import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-182import 
org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
-183import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-184import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-185import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-186import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-187import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-188import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
-189import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
-190import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-191import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-192import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-193import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
-194import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
-195import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
-196import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
-197import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
-198import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
-

[03/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
index af0a727..1b03e97 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder – Dependency 
Information
 
@@ -148,7 +148,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-04
+  Last Published: 
2017-10-05
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
index e829b1b..42cad4b 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder – Project Dependency 
Management
 
@@ -766,7 +766,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-04
+  Last Published: 
2017-10-05
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
index 70545a7..efa92ff 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder – About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-04
+  Last Published: 
2017-10-05
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
index f704da7..ca0888e 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder – CI Management
 
@@ -126,7 +126,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-04
+  Last Published: 
2017-10-05
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
index d70e41e..6e9b001 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder – Issue Management
 
@@ -123,7 +123,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.

[09/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
index 4260431..23ff345 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
@@ -26,399 +26,399 @@
 018 */
 019package 
org.apache.hadoop.hbase.security.access;
 020
-021import java.io.IOException;
-022import java.net.InetAddress;
-023import 
java.security.PrivilegedExceptionAction;
-024import java.util.ArrayList;
-025import java.util.Collection;
-026import java.util.Collections;
-027import java.util.HashMap;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.Optional;
-033import java.util.Set;
-034import java.util.TreeMap;
-035import java.util.TreeSet;
-036
-037import com.google.protobuf.Message;
-038import com.google.protobuf.RpcCallback;
-039import 
com.google.protobuf.RpcController;
-040import com.google.protobuf.Service;
-041import org.apache.commons.logging.Log;
-042import 
org.apache.commons.logging.LogFactory;
-043import 
org.apache.hadoop.conf.Configuration;
-044import 
org.apache.hadoop.hbase.ArrayBackedTag;
-045import org.apache.hadoop.hbase.Cell;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.CompareOperator;
-049import 
org.apache.hadoop.hbase.CompoundConfiguration;
-050import 
org.apache.hadoop.hbase.CoprocessorEnvironment;
-051import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-052import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-053import 
org.apache.hadoop.hbase.HConstants;
-054import 
org.apache.hadoop.hbase.KeyValue;
-055import 
org.apache.hadoop.hbase.KeyValue.Type;
-056import 
org.apache.hadoop.hbase.MetaTableAccessor;
-057import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.TableName;
-060import org.apache.hadoop.hbase.Tag;
-061import 
org.apache.hadoop.hbase.client.Append;
-062import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-063import 
org.apache.hadoop.hbase.client.Delete;
-064import 
org.apache.hadoop.hbase.client.Durability;
-065import 
org.apache.hadoop.hbase.client.Get;
-066import 
org.apache.hadoop.hbase.client.Increment;
-067import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-068import 
org.apache.hadoop.hbase.client.Mutation;
-069import 
org.apache.hadoop.hbase.client.Put;
-070import 
org.apache.hadoop.hbase.client.Query;
-071import 
org.apache.hadoop.hbase.client.RegionInfo;
-072import 
org.apache.hadoop.hbase.client.Result;
-073import 
org.apache.hadoop.hbase.client.Scan;
-074import 
org.apache.hadoop.hbase.client.SnapshotDescription;
-075import 
org.apache.hadoop.hbase.client.Table;
-076import 
org.apache.hadoop.hbase.client.TableDescriptor;
-077import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-078import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-079import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-080import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
-081import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
-082import 
org.apache.hadoop.hbase.coprocessor.MasterObserver;
-083import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-084import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-085import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-086import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-087import 
org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
-088import 
org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
-089import 
org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
-090import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-091import 
org.apache.hadoop.hbase.filter.Filter;
-092import 
org.apache.hadoop.hbase.filter.FilterList;
-093import 
org.apache.hadoop.hbase.io.hfile.HFile;
-094import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-095import 
org.apache.hadoop.hbase.ipc.RpcServer;
-096import 
org.apache.hadoop.hbase.master.MasterServices;
-097import 
org.apache.hadoop.hbase.master.locking.LockProcedure;
-098import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-099import 
org.apache.hadoop.hbase.net.Address;
-100import 
org.apache.hadoop.hbase.procedure2.LockType;
-101import 
org.apache.hadoop.hbase.procedure2.Procedure;
-102import 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-103import 
org.apache.hadoop.hbase.protobuf.ProtobufUti

[05/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
index dc1bb25..f04e965 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
@@ -31,876 +31,876 @@
 023import static 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY;
 024import static 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
 025
-026import java.io.IOException;
-027import java.net.InetAddress;
-028import java.util.ArrayList;
-029import java.util.Collections;
-030import java.util.HashMap;
-031import java.util.Iterator;
-032import java.util.List;
-033import java.util.Map;
-034import java.util.Optional;
-035
-036import org.apache.commons.logging.Log;
-037import 
org.apache.commons.logging.LogFactory;
-038import 
org.apache.hadoop.conf.Configuration;
-039import 
org.apache.hadoop.hbase.AuthUtil;
-040import org.apache.hadoop.hbase.Cell;
-041import 
org.apache.hadoop.hbase.CellScanner;
-042import 
org.apache.hadoop.hbase.CellUtil;
-043import 
org.apache.hadoop.hbase.CoprocessorEnvironment;
-044import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-045import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-046import 
org.apache.hadoop.hbase.HColumnDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.HTableDescriptor;
-049import 
org.apache.hadoop.hbase.MetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableName;
-051import org.apache.hadoop.hbase.Tag;
-052import org.apache.hadoop.hbase.TagType;
-053import org.apache.hadoop.hbase.TagUtil;
-054import 
org.apache.yetus.audience.InterfaceAudience;
-055import 
org.apache.hadoop.hbase.client.Append;
-056import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-057import 
org.apache.hadoop.hbase.client.Delete;
-058import 
org.apache.hadoop.hbase.client.Get;
-059import 
org.apache.hadoop.hbase.client.Increment;
-060import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-061import 
org.apache.hadoop.hbase.client.Mutation;
-062import 
org.apache.hadoop.hbase.client.Put;
-063import 
org.apache.hadoop.hbase.client.Result;
-064import 
org.apache.hadoop.hbase.client.Scan;
-065import 
org.apache.hadoop.hbase.client.TableDescriptor;
-066import 
org.apache.hadoop.hbase.constraint.ConstraintException;
-067import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-068import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-069import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
-071import 
org.apache.hadoop.hbase.coprocessor.MasterObserver;
-072import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-075import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-076import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-077import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-078import 
org.apache.hadoop.hbase.filter.Filter;
-079import 
org.apache.hadoop.hbase.filter.FilterBase;
-080import 
org.apache.hadoop.hbase.filter.FilterList;
-081import 
org.apache.hadoop.hbase.io.hfile.HFile;
-082import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-083import 
org.apache.hadoop.hbase.ipc.RpcServer;
-084import 
org.apache.hadoop.hbase.master.MasterServices;
-085import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
-086import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult;
-087import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos;
-088import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest;
-089import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse;
-090import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.ListLabelsRequest;
-091import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.ListLabelsResponse;
-092import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest;
-093import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel;
-094import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest;
-095import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
-096import 
org.apache.hadoop.hbase.protobuf.g

[49/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 4a0f1d2..6a8a703 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 ©2007 - 2017 The Apache Software Foundation
 
   File: 2053,
- Errors: 13753,
+ Errors: 13738,
  Warnings: 0,
  Infos: 0
   
@@ -1217,7 +1217,7 @@ under the License.
   0
 
 
-  14
+  6
 
   
   
@@ -7363,7 +7363,7 @@ under the License.
   0
 
 
-  12
+  9
 
   
   
@@ -11577,7 +11577,7 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
@@ -15175,7 +15175,7 @@ under the License.
   0
 
 
-  37
+  36
 
   
   
@@ -16337,7 +16337,7 @@ under the License.
   0
 
 
-  11
+  10
 
   
   
@@ -20355,7 +20355,7 @@ under the License.
   0
 
 
-  195
+  197
 
   
   
@@ -24443,7 +24443,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -27481,7 +27481,7 @@ under the License.
   0
 
 
-  13
+  10
 
   
   
@@ -28377,7 +28377,7 @@ under the License.
   0
 
 
-  18
+  17
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/coc.html
--
diff --git a/coc.html b/coc.html
index fbf3e65..dcf4b87 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org";>the priv
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-04
+  Last Published: 
2017-10-05
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 551d766..e3585df 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-04
+  Last Published: 
2017-10-05
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index b6bccbc..bd5d4dc 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -445,7 +445,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-04
+  Last Published: 
2017-10-05
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 36c461f..3c4486f 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Reactor Dependency Convergence
 
@@ -894,7 +894,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
   

[38/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
index 066ac89..bf3cd21 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
@@ -172,7 +172,7 @@ extends ObserverContext
-bypass,
 complete,
 createAndPrepare,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass, shouldComplete
+bypass,
 complete,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass,
 shouldComplete
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperation.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperation.html
index 5f51d1e..1021b1c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperation.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperation.html
@@ -175,7 +175,7 @@ extends ObserverContext
-bypass,
 complete,
 createAndPrepare,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass, shouldComplete
+bypass,
 complete,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass,
 shouldComplete
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.RegionServerObserverOperation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.RegionServerObserverOperation.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.RegionServerObserverOperation.html
index 9a98c9d..aaae7a8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.RegionServerObserverOperation.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.RegionServerObserverOperation.html
@@ -175,7 +175,7 @@ extends ObserverContext
-bypass,
 complete,
 createAndPrepare,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass, shouldComplete
+bypass,
 complete,
 createAndPrepare,
 getCaller,
 getEnvironment,
 prepare,
 shouldBypass,
 shouldComplete
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.SecureBulkLoadListener.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.SecureBulkLoadListener.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.SecureBulkLoadListener.html
index ede7de7..270c1e9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.SecureBulkLoadListener.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.SecureBulkLoadListener.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class SecureBulkLoadManager.SecureBulkLoadListener
+private static class SecureBulkLoadManager.SecureBulkLoadListener
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements Region.BulkLoadListener
 
@@ -248,7 +248,7 @@ implements 
 
 fs
-private final org.apache.hadoop.fs.FileSystem fs
+private final org.apache.hadoop.fs.FileSystem fs
 
 
 
@@ -257,7 +257,7 @@ implements 
 
 stagingDir
-private final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String stagingDir
+private final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String stagingDir
 
 
 
@@ -266,7 +266,7 @@ implements 
 
 conf
-private final org.apache.hadoop.conf.Configuration conf
+private final org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -275,7 +275,7 @@ implements 
 
 srcFs
-private org.apache.hadoop.fs.FileSystem srcFs
+private org.apache.hadoop.fs.FileSystem srcFs
 
 
 
@@ -284,7 +284,7 @@ implements 
 
 origPermissions
-private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?

[51/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/b838bdf0
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/b838bdf0
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/b838bdf0

Branch: refs/heads/asf-site
Commit: b838bdf0b6cd484dfb428baf72d002544624b5c5
Parents: 9c3f9b8
Author: jenkins 
Authored: Thu Oct 5 15:13:04 2017 +
Committer: jenkins 
Committed: Thu Oct 5 15:13:04 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apidocs/index-all.html  | 2 +-
 .../org/apache/hadoop/hbase/CellBuilder.html|28 +-
 .../hadoop/hbase/class-use/CellBuilder.html | 2 +-
 .../org/apache/hadoop/hbase/CellBuilder.html|58 +-
 .../org/apache/hadoop/hbase/CellUtil.html   | 6 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 15146 
 checkstyle.rss  |20 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 1 +
 devapidocs/allclasses-noframe.html  | 1 +
 devapidocs/constant-values.html | 6 +-
 devapidocs/deprecated-list.html | 2 +-
 devapidocs/index-all.html   |48 +-
 .../hadoop/hbase/CellBuilder.DataType.html  |   384 +
 .../org/apache/hadoop/hbase/CellBuilder.html|57 +-
 .../apache/hadoop/hbase/CellBuilderFactory.html | 4 +-
 .../hadoop/hbase/ExtendedCellBuilder.html   |52 +-
 .../hadoop/hbase/ExtendedCellBuilderImpl.html   |83 +-
 .../hbase/IndividualBytesFieldCellBuilder.html  |17 +-
 .../apache/hadoop/hbase/KeyValueBuilder.html|17 +-
 .../impl/IncrementalTableBackupClient.html  |14 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hbase/class-use/CellBuilder.DataType.html   |   199 +
 .../hadoop/hbase/class-use/CellBuilder.html |26 +-
 .../hbase/class-use/CoprocessorEnvironment.html |12 +-
 .../hbase/class-use/ExtendedCellBuilder.html|32 +
 .../hadoop/hbase/class-use/KeyValue.Type.html   |10 +-
 .../hadoop/hbase/client/VersionInfoUtil.html|10 +-
 .../hadoop/hbase/client/package-tree.html   |24 +-
 .../CoprocessorHost.ObserverOperation.html  | 2 +-
 ...ocessorHost.ObserverOperationWithResult.html | 2 +-
 ...ssorHost.ObserverOperationWithoutResult.html | 2 +-
 .../coprocessor/Export.PrivilegedWriter.html|18 +-
 .../hbase/coprocessor/Export.Response.html  |14 +-
 .../hbase/coprocessor/Export.SecureWriter.html  | 2 +-
 .../hbase/coprocessor/ObserverContext.html  |   100 +-
 .../coprocessor/class-use/ObserverContext.html  |30 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../org/apache/hadoop/hbase/ipc/CallRunner.html |26 +-
 .../hadoop/hbase/ipc/NettyServerCall.html   | 9 +-
 .../apache/hadoop/hbase/ipc/RpcCallContext.html |50 +-
 .../RpcServer.BlockingServiceAndInterface.html  |12 +-
 .../hadoop/hbase/ipc/RpcServer.CallCleanup.html | 4 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.html  |   190 +-
 .../org/apache/hadoop/hbase/ipc/ServerCall.html |   212 +-
 .../hadoop/hbase/ipc/SimpleRpcScheduler.html|24 +-
 .../hadoop/hbase/ipc/SimpleServerCall.html  | 9 +-
 .../hadoop/hbase/ipc/class-use/RpcCall.html |19 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 ...CoprocessorHost.MasterObserverOperation.html | 2 +-
 .../hbase/master/balancer/package-tree.html | 2 +-
 .../hadoop/hbase/master/package-tree.html   | 6 +-
 .../master/procedure/MasterProcedureEnv.html|26 +-
 .../hbase/master/procedure/package-tree.html| 2 +-
 .../hbase/master/snapshot/SnapshotManager.html  |   110 +-
 .../hadoop/hbase/monitoring/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/package-frame.html  | 1 +
 .../apache/hadoop/hbase/package-summary.html|28 +-
 .../org/apache/hadoop/hbase/package-tree.html   |13 +-
 .../org/apache/hadoop/hbase/package-use.html|   153 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 .../hadoop/hbase/quotas/p

[43/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerCall.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerCall.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerCall.html
index 667670d..f95421a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerCall.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerCall.html
@@ -221,7 +221,7 @@ extends 
 
 Methods inherited from class org.apache.hadoop.hbase.ipc.ServerCall
-cleanup,
 createHeaderAndMessageBytes,
 disconnectSince,
 getCellScanner,
 getClientVersionInfo,
 getDeadline,
 getHeader,
 getMethod,
 getParam,
 getPriority,
 getReceiveTime,
 getRemoteAddress,
 getRemotePort,
 getRequestUser,
 getRequestUserName,
 getResponse,
 getResponseBlockSize,
 getResponseCellSize, getResponseExceptionSize,
 getService,
 getSize,
 getStartTime,
 getTimeout,
 getTraceInfo,
 incrementResponseBlockSize,
 incrementResponseCellSize,
 incrementResponseExceptionSize,
 isClientCellBlockSupported,
 isRetryImmediatelySupported,
 setCallBack,
 setExceptionResponse,
 setResponse,
 setStartTime, toShortString,
 toString,
 wrapWithSasl
+cleanup,
 createHeaderAndMessageBytes,
 disconnectSince,
 getCellScanner,
 getClientVersionInfo,
 getDeadline,
 getHeader,
 getMethod,
 getParam,
 getPriority,
 getReceiveTime,
 getRemoteAddress,
 getRemotePort,
 getRequestUser,
 getResponse,
 getResponseBlockSize,
 getResponseCellSize,
 getResponseExceptionSize, getService,
 getSize,
 getStartTime,
 getTimeout,
 getTraceInfo,
 incrementResponseBlockSize,
 incrementResponseCellSize,
 incrementResponseExceptionSize,
 isClientCellBlockSupported,
 isRetryImmediatelySupported,
 setCallBack,
 setExceptionResponse,
 setResponse,
 setStartTime,
 toShortString, toString,
 wrapWithSasl
 
 
 
@@ -230,6 +230,13 @@ extends Methods inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--";
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-";
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--";
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--";
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
 /Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--";
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-";
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-";
 title="class or interface in java.lang">wait
 
+
+
+
+
+Methods inherited from interface org.apache.hadoop.hbase.ipc.RpcCallContext
+getRequestUserName
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCall.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCall.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCall.html
index 61d176d..49605a4 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCall.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCall.html
@@ -164,15 +164,24 @@
 
 
 
-static RpcCall
+RpcCall
+CallRunner.getRpcCall() 
+
+
+
+
+Methods in org.apache.hadoop.hbase.ipc
 that return types with arguments of type RpcCall 
+
+Modifier and Type
+Method and Description
+
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
 RpcServer.getCurrentCall()
 Needed for features such as delayed calls.
 
 
-
-RpcCall
-CallRunner.getRpcCall() 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index ea3d1c5..007957e 10064

[44/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/org/apache/hadoop/hbase/ipc/ServerCall.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/ServerCall.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/ServerCall.html
index 37f49ae..6b81a1c 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/ServerCall.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/ServerCall.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":9,"i2":9,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9};
+var methods = 
{"i0":10,"i1":9,"i2":9,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":9,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-abstract class ServerCall
+abstract class ServerCall
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements RpcCall, RpcResponse
 Datastructure that holds all necessary to a method 
invocation and then afterward, carries
@@ -368,94 +368,90 @@ implements getRemotePort() 
 
 
-User
+http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
 getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
+Returns the user credentials associated with the current 
RPC request or not present if no
+ credentials were provided.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-getRequestUserName() 
-
-
 BufferChain
 getResponse() 
 
-
+
 long
 getResponseBlockSize() 
 
-
+
 long
 getResponseCellSize()
 The size of response cells that have been accumulated so 
far.
 
 
-
+
 long
 getResponseExceptionSize() 
 
-
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService
 getService() 
 
-
+
 long
 getSize()
 Used to calculate the request call queue size.
 
 
-
+
 long
 getStartTime() 
 
-
+
 int
 getTimeout() 
 
-
+
 org.apache.htrace.TraceInfo
 getTraceInfo() 
 
-
+
 void
 incrementResponseBlockSize(long blockSize) 
 
-
+
 void
 incrementResponseCellSize(long cellSize)
 Add on the given amount to the retained cell size.
 
 
-
+
 void
 incrementResponseExceptionSize(long exSize) 
 
-
+
 boolean
 isClientCellBlockSupported()
 If the client connected and specified a codec to use, then 
we will use this codec making
  cellblocks to return.
 
 
-
+
 boolean
 isRetryImmediatelySupported() 
 
-
+
 void
 setCallBack(RpcCallback callback)
 Sets a callback which has to be executed at the end of this 
RPC call.
 
 
-
+
 (package private) static void
 setExceptionResponse(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String errorMsg,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.Builder headerBuilder) 
 
-
+
 void
 setResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message m,
CellScanner cells,
@@ -464,25 +460,25 @@ implements Set the response resulting from this RPC call.
 
 
-
+
 void
 setStartTime(long t)
 Set the time when the call starts to be executed.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 toShortString() 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 toString() 
 
-
+
 protected BufferChain
 wrapWithSasl(BufferChain bc) 
 
-
+
 private static void
 writeToCOS(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message result,
   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message header,
@@ -504,6 +500,13 @@ implements Methods inherited from interface org.apache.hadoop.hbase.ipc.RpcCall
 sendResponseIfReady
 
+
+
+
+
+Methods inherited from int

  1   2   >