svn commit: r29846 - in /dev/hbase/hbase-1.4.8RC0: ./ compat-check-report.html hbase-1.4.8-bin.tar.gz hbase-1.4.8-bin.tar.gz.asc hbase-1.4.8-bin.tar.gz.sha512 hbase-1.4.8-src.tar.gz hbase-1.4.8-src.ta

2018-10-02 Thread apurtell
Author: apurtell
Date: Tue Oct  2 19:55:56 2018
New Revision: 29846

Log:
Stage HBase 1.4.8RC0 artifacts

Added:
dev/hbase/hbase-1.4.8RC0/
dev/hbase/hbase-1.4.8RC0/compat-check-report.html
dev/hbase/hbase-1.4.8RC0/hbase-1.4.8-bin.tar.gz   (with props)
dev/hbase/hbase-1.4.8RC0/hbase-1.4.8-bin.tar.gz.asc
dev/hbase/hbase-1.4.8RC0/hbase-1.4.8-bin.tar.gz.sha512
dev/hbase/hbase-1.4.8RC0/hbase-1.4.8-src.tar.gz   (with props)
dev/hbase/hbase-1.4.8RC0/hbase-1.4.8-src.tar.gz.asc
dev/hbase/hbase-1.4.8RC0/hbase-1.4.8-src.tar.gz.sha512
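
The staged set pairs each tarball with a detached .asc signature and a .sha512 checksum. As a hedged illustration (not part of the commit), here is a minimal Java sketch a release voter might use to recompute the SHA-512 digest of the binary tarball and compare it by hand against the staged hbase-1.4.8-bin.tar.gz.sha512 file; the local path is an assumption, and no particular layout of the checksum file is assumed.

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.DigestInputStream;
import java.security.MessageDigest;

// Sketch only: print the SHA-512 of a downloaded RC artifact so it can be compared
// against the staged .sha512 file. The path below is hypothetical.
public class PrintSha512 {
  public static void main(String[] args) throws Exception {
    Path tarball = Paths.get("hbase-1.4.8-bin.tar.gz");
    MessageDigest md = MessageDigest.getInstance("SHA-512");
    try (InputStream in = new DigestInputStream(Files.newInputStream(tarball), md)) {
      byte[] buf = new byte[8192];
      while (in.read(buf) != -1) {
        // draining the stream feeds the digest
      }
    }
    StringBuilder hex = new StringBuilder();
    for (byte b : md.digest()) {
      hex.append(String.format("%02x", b));
    }
    System.out.println(hex); // compare with the contents of hbase-1.4.8-bin.tar.gz.sha512
  }
}

The GPG signature (.asc) would be checked separately against the signer's public key.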

Added: dev/hbase/hbase-1.4.8RC0/compat-check-report.html
==
--- dev/hbase/hbase-1.4.8RC0/compat-check-report.html (added)
+++ dev/hbase/hbase-1.4.8RC0/compat-check-report.html Tue Oct  2 19:55:56 2018
@@ -0,0 +1,487 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+
+
+
+
+
+hbase: rel/1.4.7 to 1.4.8RC0 compatibility report
+
+body {
+font-family:Arial, sans-serif;
+background-color:White;
+color:Black;
+}
+hr {
+color:Black;
+background-color:Black;
+height:1px;
+border:0;
+}
+h1 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.625em;
+}
+h2 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.25em;
+white-space:nowrap;
+}
+div.symbols {
+color:#003E69;
+}
+div.symbols i {
+color:Brown;
+}
+span.section {
+font-weight:bold;
+cursor:pointer;
+color:#003E69;
+white-space:nowrap;
+margin-left:0.3125em;
+}
+span:hover.section {
+color:#336699;
+}
+span.sect_aff {
+cursor:pointer;
+padding-left:1.55em;
+font-size:0.875em;
+color:#cc3300;
+}
+span.ext {
+font-weight:normal;
+}
+span.jar {
+color:#cc3300;
+font-size:0.875em;
+font-weight:bold;
+}
+div.jar_list {
+padding-left:0.4em;
+font-size:0.94em;
+}
+span.pkg_t {
+color:#408080;
+font-size:0.875em;
+}
+span.pkg {
+color:#408080;
+font-size:0.875em;
+font-weight:bold;
+}
+span.cname {
+color:Green;
+font-size:0.875em;
+font-weight:bold;
+}
+span.iname_b {
+font-weight:bold;
+}
+span.iname_a {
+color:#33;
+font-weight:bold;
+font-size:0.94em;
+}
+span.sym_p {
+font-weight:normal;
+white-space:normal;
+}
+span.sym_pd {
+white-space:normal;
+}
+span.sym_p span, span.sym_pd span {
+white-space:nowrap;
+}
+span.attr {
+color:Black;
+font-weight:normal;
+}
+span.deprecated {
+color:Red;
+font-weight:bold;
+font-family:Monaco, monospace;
+}
+div.affect {
+padding-left:1em;
+padding-bottom:10px;
+font-size:0.87em;
+font-style:italic;
+line-height:0.9em;
+}
+div.affected {
+padding-left:2em;
+padding-top:10px;
+}
+table.ptable {
+border-collapse:collapse;
+border:1px outset black;
+margin-left:0.95em;
+margin-top:3px;
+margin-bottom:3px;
+width:56.25em;
+}
+table.ptable td {
+border:1px solid Gray;
+padding:3px;
+font-size:0.875em;
+text-align:left;
+vertical-align:top;
+max-width:28em;
+word-wrap:break-word;
+}
+table.ptable th {
+background-color:#ee;
+font-weight:bold;
+color:#33;
+font-family:Verdana, Arial;
+font-size:0.875em;
+border:1px solid Gray;
+text-align:center;
+vertical-align:top;
+white-space:nowrap;
+padding:3px;
+}
+table.summary {
+border-collapse:collapse;
+border:1px outset black;
+}
+table.summary th {
+background-color:#ee;
+font-weight:normal;
+text-align:left;
+font-size:0.94em;
+white-space:nowrap;
+border:1px inset Gray;
+padding:3px;
+}
+table.summary td {
+text-align:right;
+white-space:nowrap;
+border:1px inset Gray;
+padding:3px 5px 3px 10px;
+}
+span.mngl {
+padding-left:1em;
+font-size:0.875em;
+cursor:text;
+color:#44;
+font-weight:bold;
+}
+span.pleft {
+padding-left:2.5em;
+}
+span.color_p {
+font-style:italic;
+color:Brown;
+}
+span.param {
+font-style:italic;
+}
+span.focus_p {
+font-style:italic;
+background-color:#DCDCDC;
+}
+span.ttype {
+font-weight:normal;
+}
+span.nowrap {
+white-space:nowrap;
+}
+span.value {
+white-space:nowrap;
+font-weight:bold;
+}
+.passed {
+background-color:#CCFFCC;
+font-weight:normal;
+}
+.warning {
+background-color:#F4F4AF;
+font-weight:normal;
+}
+.failed {
+background-color:#FF;
+font-weight:normal;
+}
+.new {
+background-color:#C6DEFF;
+font-weight:normal;
+}
+
+.compatible {
+background-color:#CCFFCC;
+font-weight:normal;
+}
+.almost_compatible {
+background-color:#FFDAA3;
+font-weight:normal;
+}
+.incompatible {
+background-color:#FF;
+font-weight:normal;
+}
+.gray {
+background-color:#DCDCDC;
+font-weight:normal;
+}
+
+.top_ref {
+font-size:0.69em;
+}

hbase git commit: Update POMs and CHANGES.txt for 1.4.8 RC0

2018-10-02 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 b22095eea -> 91118ce5f
Updated Tags:  refs/tags/1.4.8RC0 [created] 2f0492554


Update POMs and CHANGES.txt for 1.4.8 RC0


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/91118ce5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/91118ce5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/91118ce5

Branch: refs/heads/branch-1.4
Commit: 91118ce5f10fb4efa03cc8c5a47c7ce97175e85e
Parents: b22095e
Author: Andrew Purtell 
Authored: Tue Oct 2 10:06:47 2018 -0700
Committer: Andrew Purtell 
Committed: Tue Oct 2 10:06:47 2018 -0700

--
 CHANGES.txt | 46 
 hbase-annotations/pom.xml   |  2 +-
 .../hbase-archetype-builder/pom.xml |  2 +-
 hbase-archetypes/hbase-client-project/pom.xml   |  2 +-
 .../hbase-shaded-client-project/pom.xml |  2 +-
 hbase-archetypes/pom.xml|  2 +-
 hbase-assembly/pom.xml  |  2 +-
 hbase-checkstyle/pom.xml|  4 +-
 hbase-client/pom.xml|  2 +-
 hbase-common/pom.xml|  2 +-
 hbase-error-prone/pom.xml   |  4 +-
 hbase-examples/pom.xml  |  2 +-
 hbase-external-blockcache/pom.xml   |  2 +-
 hbase-hadoop-compat/pom.xml |  2 +-
 hbase-hadoop2-compat/pom.xml|  2 +-
 hbase-it/pom.xml|  2 +-
 hbase-metrics-api/pom.xml   |  2 +-
 hbase-metrics/pom.xml   |  2 +-
 hbase-prefix-tree/pom.xml   |  2 +-
 hbase-procedure/pom.xml |  2 +-
 hbase-protocol/pom.xml  |  2 +-
 hbase-resource-bundle/pom.xml   |  2 +-
 hbase-rest/pom.xml  |  2 +-
 hbase-rsgroup/pom.xml   |  2 +-
 hbase-server/pom.xml|  2 +-
 .../hbase-shaded-check-invariants/pom.xml   |  2 +-
 hbase-shaded/hbase-shaded-client/pom.xml|  2 +-
 hbase-shaded/hbase-shaded-server/pom.xml|  2 +-
 hbase-shaded/pom.xml|  2 +-
 hbase-shell/pom.xml |  2 +-
 hbase-testing-util/pom.xml  |  2 +-
 hbase-thrift/pom.xml|  2 +-
 pom.xml |  2 +-
 33 files changed, 80 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/91118ce5/CHANGES.txt
--
diff --git a/CHANGES.txt b/CHANGES.txt
index 2cc9302..b9d6764 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,5 +1,51 @@
 HBase Change Log
 
+Release Notes - HBase - Version 1.4.8 10/8/2018
+
+** Sub-task
+* [HBASE-19275] - TestSnapshotFileCache never worked properly
+* [HBASE-20317] - Backport HBASE-20261 "Table page (table.jsp) in Master UI does not show replicaIds for hbase meta table" to branch-1
+* [HBASE-21061] - fix synchronization of org.apache.hadoop.hbase.ipc.RpcServer$Connection.useWrap
+* [HBASE-21189] - flaky job should gather machine stats
+* [HBASE-21190] - Log files and count of entries in each as we load from the MasterProcWAL store
+
+** Bug
+* [HBASE-18451] - PeriodicMemstoreFlusher should inspect the queue before adding a delayed flush request
+* [HBASE-18549] - Unclaimed replication queues can go undetected
+* [HBASE-19418] - RANGE_OF_DELAY in PeriodicMemstoreFlusher should be configurable.
+* [HBASE-20307] - LoadTestTool prints too much zookeeper logging
+* [HBASE-20704] - Sometimes some compacted storefiles are not archived on region close
+* [HBASE-20766] - Verify Replication Tool Has Typo "remove cluster"
+* [HBASE-21030] - Correct javadoc for append operation
+* [HBASE-21088] - HStoreFile should be closed in HStore#hasReferences
+* [HBASE-21117] - Backport HBASE-18350 (fix RSGroups) to branch-1 (Only port the part fixing table locking issue.)
+* [HBASE-21127] - TableRecordReader need to handle cursor result too
+* [HBASE-21158] - Empty qualifier cell is always returned when using QualifierFilter
+* [HBASE-21162] - Revert suspicious change to BoundedByteBufferPool and disable use of direct buffers for IPC reservoir by default
+* [HBASE-21166] - Creating a CoprocessorHConnection re-retrieves the cluster id from ZK
+* [HBASE-21173] - Remove the duplicate HRegion#close in TestHRegion
+* [HBASE-21174] - [REST] Failed to parse empty qualifier in TableResource#getScanResource
+* [HBASE-21179] - Fix the number of actions in responseTooSlow

[22/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.html
index d031298..f9fa8ff 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -148,61 +148,65 @@ extends 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_LOG_EDITS_FILTERED
+SOURCE_FAILED_RECOVERY_QUEUES
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_LOG_READ_IN_BYTES
+SOURCE_LOG_EDITS_FILTERED
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_LOG_READ_IN_EDITS
+SOURCE_LOG_READ_IN_BYTES
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_REPEATED_LOG_FILE_BYTES
+SOURCE_LOG_READ_IN_EDITS
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_RESTARTED_LOG_READING
+SOURCE_REPEATED_LOG_FILE_BYTES
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_SHIPPED_BATCHES
+SOURCE_RESTARTED_LOG_READING
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_SHIPPED_BYTES
+SOURCE_SHIPPED_BATCHES
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SOURCE_SHIPPED_HFILES
+SOURCE_SHIPPED_BYTES
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+SOURCE_SHIPPED_HFILES
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 SOURCE_SHIPPED_KBS
 Deprecated.
 Use SOURCE_SHIPPED_BYTES
 instead
 
 
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 SOURCE_SHIPPED_OPS
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 SOURCE_SIZE_OF_HFILE_REFS_QUEUE
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 SOURCE_SIZE_OF_LOG_QUEUE
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 SOURCE_UNCLEANLY_CLOSED_IGNORED_IN_BYTES
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 SOURCE_UNCLEANLY_CLOSED_LOGS
 
@@ -266,54 +270,58 @@ extends 
 void
-incrHFilesShipped(longhfiles)
+incrFailedRecoveryQueue()
 
 
 void
-incrLogEditsFiltered(longsize)
+incrHFilesShipped(longhfiles)
 
 
 void
-incrLogReadInBytes(longsize)
+incrLogEditsFiltered(longsize)
 
 
 void
-incrLogReadInEdits(longsize)
+incrLogReadInBytes(longsize)
 
 
 void
-incrOpsShipped(longops)
+incrLogReadInEdits(longsize)
 
 
 void
-incrRepeatedFileBytes(longbytes)
+incrOpsShipped(longops)
 
 
 void
-incrRestartedWALReading()
+incrRepeatedFileBytes(longbytes)
 
 
 void
-incrShippedBytes(longsize)
+incrRestartedWALReading()
 
 
 void
-incrSizeOfHFileRefsQueue(longsize)
+incrShippedBytes(longsize)
 
 
 void
-incrSizeOfLogQueue(intsize)
+incrSizeOfHFileRefsQueue(longsize)
 
 
 void
-incrUncleanlyClosedWALs()
+incrSizeOfLogQueue(intsize)
 
 
 void
-incrUnknownFileLengthForClosedWAL()
+incrUncleanlyClosedWALs()
 
 
 void
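
The diff above inserts a SOURCE_FAILED_RECOVERY_QUEUES key and a matching incrFailedRecoveryQueue() method into the replication source metrics interface, shifting the remaining constants and incr* methods down by one slot. A minimal sketch of what such an addition looks like is below; the interface is trimmed to two entries, and the key strings and AtomicLong-backed implementation are illustrative assumptions, not the actual HBase code.

import java.util.concurrent.atomic.AtomicLong;

// Illustrative only: a cut-down metrics interface gaining a "failed recovery queue"
// counter, mirroring the shape of the change in the diff above. Key strings and the
// AtomicLong implementation are assumptions for this sketch.
interface ReplicationSourceMetricsSketch {
  String SOURCE_FAILED_RECOVERY_QUEUES = "source.failedRecoveryQueue"; // hypothetical key
  String SOURCE_SHIPPED_BATCHES = "source.shippedBatches";             // hypothetical key

  void incrFailedRecoveryQueue();

  void incrBatchesShipped(int batches);
}

class SimpleReplicationSourceMetrics implements ReplicationSourceMetricsSketch {
  private final AtomicLong failedRecoveryQueues = new AtomicLong();
  private final AtomicLong shippedBatches = new AtomicLong();

  @Override
  public void incrFailedRecoveryQueue() {
    failedRecoveryQueues.incrementAndGet();
  }

  @Override
  public void incrBatchesShipped(int batches) {
    shippedBatches.addAndGet(batches);
  }
}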

[08/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
index ef39f9e..f188fed 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
@@ -25,543 +25,555 @@
 017 */
 018package 
org.apache.hadoop.hbase.replication;
 019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.HashMap;
-027import java.util.List;
-028import java.util.Map;
-029import java.util.UUID;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.atomic.AtomicBoolean;
-032import 
java.util.concurrent.atomic.AtomicInteger;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import org.apache.hadoop.hbase.Cell;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import org.apache.hadoop.hbase.Waiter;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.hbase.client.RegionInfo;
-041import 
org.apache.hadoop.hbase.client.Table;
-042import 
org.apache.hadoop.hbase.regionserver.HRegion;
-043import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-048import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-049import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-050import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-053import 
org.apache.hadoop.hbase.util.Threads;
-054import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-055import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-056import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-057import org.junit.AfterClass;
-058import org.junit.Assert;
-059import org.junit.Before;
-060import org.junit.BeforeClass;
-061import org.junit.ClassRule;
-062import org.junit.Test;
-063import 
org.junit.experimental.categories.Category;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066
-067/**
-068 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-069 */
-070@Category({ ReplicationTests.class, 
MediumTests.class })
-071public class TestReplicationEndpoint 
extends TestReplicationBase {
-072
-073  @ClassRule
-074  public static final HBaseClassTestRule 
CLASS_RULE =
-075  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-076
-077  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
+020import static 
org.mockito.Mockito.doNothing;
+021import static org.mockito.Mockito.mock;
+022import static org.mockito.Mockito.spy;
+023import static 
org.mockito.Mockito.verify;
+024import static org.mockito.Mockito.when;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.HashMap;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.UUID;
+032import java.util.concurrent.Callable;
+033import 
java.util.concurrent.atomic.AtomicBoolean;
+034import 
java.util.concurrent.atomic.AtomicInteger;
+035import 
java.util.concurrent.atomic.AtomicReference;
+036import org.apache.hadoop.hbase.Cell;
+037import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.Waiter;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+041import 
org.apache.hadoop.hbase.client.Put;
+042import 
org.apache.hadoop.hbase.client.RegionInfo;
+043import 
org.apache.hadoop.hbase.client.Table;
+044import 
org.apache.hadoop.hbase.regionserver.HRegion;
+045import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
+046import 
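
The updated imports above add spy and doNothing alongside mock, verify and when, which suggests the revised test stubs a void method on a spied object before verifying interactions. A self-contained sketch of that Mockito pattern follows, using a plain java.util.List instead of any HBase replication class.

import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;

import java.util.ArrayList;
import java.util.List;

// Self-contained illustration of the spy + doNothing + verify pattern implied by the
// new imports; it exercises a plain List rather than HBase's replication endpoint.
public class SpyPatternExample {
  public static void main(String[] args) {
    List<String> spied = spy(new ArrayList<String>());

    // Stub the void method so the real clear() is never run on the spy.
    doNothing().when(spied).clear();

    spied.add("wal-entry");
    spied.clear();                    // suppressed by the stub above
    verify(spied).clear();            // the interaction is still recorded

    System.out.println(spied.size()); // prints 1 because clear() did nothing
  }
}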

[14/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
index e984063..083ab07 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
@@ -53,1074 +53,1082 @@
 045import 
org.apache.hadoop.conf.Configuration;
 046import org.apache.hadoop.fs.FileSystem;
 047import org.apache.hadoop.fs.Path;
-048import 
org.apache.hadoop.hbase.HConstants;
-049import org.apache.hadoop.hbase.Server;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.TableName;
-052import 
org.apache.hadoop.hbase.replication.ReplicationException;
-053import 
org.apache.hadoop.hbase.replication.ReplicationListener;
-054import 
org.apache.hadoop.hbase.replication.ReplicationPeer;
-055import 
org.apache.hadoop.hbase.replication.ReplicationPeer.PeerState;
-056import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-057import 
org.apache.hadoop.hbase.replication.ReplicationPeerImpl;
-058import 
org.apache.hadoop.hbase.replication.ReplicationPeers;
-059import 
org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
-060import 
org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
-061import 
org.apache.hadoop.hbase.replication.ReplicationTracker;
-062import 
org.apache.hadoop.hbase.replication.ReplicationUtils;
-063import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-064import 
org.apache.hadoop.hbase.util.Pair;
-065import 
org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-066import 
org.apache.hadoop.hbase.wal.SyncReplicationWALProvider;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068import 
org.apache.zookeeper.KeeperException;
-069import org.slf4j.Logger;
-070import org.slf4j.LoggerFactory;
-071
-072import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-073import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-074
-075/**
-076 * This class is responsible to manage 
all the replication sources. There are two classes of
-077 * sources:
-078 * ul
-079 * liNormal sources are 
persistent and one per peer cluster/li
-080 * liOld sources are recovered 
from a failed region server and our only goal is to finish
-081 * replicating the WAL queue it 
had/li
-082 * /ul
-083 * p
-084 * When a region server dies, this class 
uses a watcher to get notified and it tries to grab a lock
-085 * in order to transfer all the queues in 
a local old source.
-086 * p
-087 * Synchronization specification:
-088 * ul
-089 * liNo need synchronized on 
{@link #sources}. {@link #sources} is a ConcurrentHashMap and there
-090 * is a Lock for peer id in {@link 
PeerProcedureHandlerImpl}. So there is no race for peer
-091 * operations./li
-092 * liNeed synchronized on {@link 
#walsById}. There are four methods which modify it,
-093 * {@link #addPeer(String)}, {@link 
#removePeer(String)},
-094 * {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)} and {@link #preLogRoll(Path)}.
-095 * {@link #walsById} is a 
ConcurrentHashMap and there is a Lock for peer id in
-096 * {@link PeerProcedureHandlerImpl}. So 
there is no race between {@link #addPeer(String)} and
-097 * {@link #removePeer(String)}. {@link 
#cleanOldLogs(String, boolean, ReplicationSourceInterface)}
-098 * is called by {@link 
ReplicationSourceInterface}. So no race with {@link #addPeer(String)}.
-099 * {@link #removePeer(String)} will 
terminate the {@link ReplicationSourceInterface} firstly, then
-100 * remove the wals from {@link 
#walsById}. So no race with {@link #removePeer(String)}. The only
-101 * case need synchronized is {@link 
#cleanOldLogs(String, boolean, ReplicationSourceInterface)} and
-102 * {@link 
#preLogRoll(Path)}./li
-103 * liNo need synchronized on 
{@link #walsByIdRecoveredQueues}. There are three methods which
-104 * modify it, {@link #removePeer(String)} 
,
-105 * {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)} and
-106 * {@link 
ReplicationSourceManager.NodeFailoverWorker#run()}.
-107 * {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)} is called by
-108 * {@link ReplicationSourceInterface}. 
{@link #removePeer(String)} will terminate the
-109 * {@link ReplicationSourceInterface} 
firstly, then remove the wals from
-110 * {@link #walsByIdRecoveredQueues}. And 
{@link ReplicationSourceManager.NodeFailoverWorker#run()}
-111 * will add the wals to {@link 
#walsByIdRecoveredQueues} firstly, then start up a
-112 * {@link ReplicationSourceInterface}. 
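
The (pre-change) javadoc quoted in the diff above describes the manager's bookkeeping: a ConcurrentHashMap of sources keyed by peer id, walsById tracking WAL queues for normal, persistent sources, and walsByIdRecoveredQueues tracking queues recovered from a dead region server, with synchronization only where the javadoc calls for it. A heavily simplified sketch of that layout follows; the field and method names mirror the javadoc, but the types and bodies are placeholders, not the real ReplicationSourceManager.

import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;

// Placeholder sketch of the bookkeeping described above: one persistent source per peer,
// with WAL queues tracked separately for normal and recovered sources. Not the real class.
class ReplicationBookkeepingSketch {
  /** One replication source per peer id; a ConcurrentHashMap, so lookups need no lock. */
  private final Map<String, Object> sources = new ConcurrentHashMap<>();

  /** peer id -> (WAL group -> WAL file names) for normal, persistent sources. */
  private final Map<String, Map<String, NavigableSet<String>>> walsById = new ConcurrentHashMap<>();

  /** queue id -> (WAL group -> WAL file names) for queues recovered from a dead region server. */
  private final Map<String, Map<String, NavigableSet<String>>> walsByIdRecoveredQueues = new ConcurrentHashMap<>();

  /** On log roll, record the new WAL for every tracked peer (synchronized, as the javadoc requires). */
  synchronized void preLogRoll(String walGroup, String walName) {
    for (Map<String, NavigableSet<String>> byGroup : walsById.values()) {
      byGroup.computeIfAbsent(walGroup, g -> new TreeSet<>()).add(walName);
    }
  }

  /** After shipping, drop WALs older than the oldest one still needed for a peer. */
  synchronized void cleanOldLogs(String peerId, String walGroup, String oldestNeeded) {
    Map<String, NavigableSet<String>> byGroup = walsById.get(peerId);
    if (byGroup != null && byGroup.containsKey(walGroup)) {
      byGroup.get(walGroup).headSet(oldestNeeded).clear();
    }
  }
}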

hbase-site git commit: INFRA-10751 Empty commit

2018-10-02 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 37b8a04a7 -> 5b11381f6


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/5b11381f
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/5b11381f
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/5b11381f

Branch: refs/heads/asf-site
Commit: 5b11381f62c2c7a816b5339afbf1016d27bfddc4
Parents: 37b8a04
Author: jenkins 
Authored: Tue Oct 2 14:53:54 2018 +
Committer: jenkins 
Committed: Tue Oct 2 14:53:54 2018 +

--

--




[24/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html 
b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
index 85c6bf7..20c5cc1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
@@ -374,8 +374,10 @@ implements getReplicableHFiles0(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringpeerId)
 
 
-private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-getRsNode(ServerNameserverName)
+https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+getRsNode(ServerNameserverName)
+Get full znode name for given region server
+
 
 
 (package private) https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
@@ -621,7 +623,17 @@ implements 
 
 getRsNode
-privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetRsNode(ServerNameserverName)
+publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetRsNode(ServerNameserverName)
+Description copied from 
interface:ReplicationQueueStorage
+Get full znode name for given region server
+
+Specified by:
+getRsNodein
 interfaceReplicationQueueStorage
+Parameters:
+serverName - the name of the region server
+Returns:
+full znode name
+
 
 
 
@@ -630,7 +642,7 @@ implements 
 
 getQueueNode
-privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetQueueNode(ServerNameserverName,
+privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetQueueNode(ServerNameserverName,
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId)
 
 
@@ -640,7 +652,7 @@ implements 
 
 getFileNode
-privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetFileNode(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueNode,
+privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetFileNode(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueNode,
https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfileName)
 
 
@@ -650,7 +662,7 @@ implements 
 
 getFileNode
-privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetFileNode(ServerNameserverName,
+privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetFileNode(ServerNameserverName,
https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId,
https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfileName)
 
@@ -661,7 +673,7 @@ implements 
 
 getSerialReplicationRegionPeerNode
-https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetSerialReplicationRegionPeerNode(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringencodedRegionName,
+https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetSerialReplicationRegionPeerNode(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringencodedRegionName,
   https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
 
  Put all regions under /hbase/replication/regions znode will lead to too 
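
The diff above widens getRsNode from private to public and documents it as returning the full znode name for a region server, with companion helpers building queue and file nodes beneath it. As a hedged illustration of that path composition, here is a small sketch; the base path ("/hbase/replication/rs") and the join helper are assumptions made for the example, not the actual ZKReplicationQueueStorage implementation.

// Illustration only: composing replication znode paths in the style the diff above
// documents (rs node -> queue node -> file node). Base path and join() are assumptions.
class ReplicationZnodeSketch {
  private final String queuesZNode = "/hbase/replication/rs"; // assumed layout

  private static String join(String parent, String child) {
    return parent + "/" + child;
  }

  /** Full znode name for a region server, e.g. /hbase/replication/rs/host,16020,1538500000000 */
  String getRsNode(String serverName) {
    return join(queuesZNode, serverName);
  }

  /** Znode for one replication queue owned by that region server. */
  String getQueueNode(String serverName, String queueId) {
    return join(getRsNode(serverName), queueId);
  }

  /** Znode tracking one WAL file's position within a queue. */
  String getFileNode(String serverName, String queueId, String fileName) {
    return join(getQueueNode(serverName, queueId), fileName);
  }
}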

[04/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointWithWALEntryFilter.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointWithWALEntryFilter.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointWithWALEntryFilter.html
index ef39f9e..f188fed 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointWithWALEntryFilter.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointWithWALEntryFilter.html
@@ -25,543 +25,555 @@
 017 */
 018package 
org.apache.hadoop.hbase.replication;
 019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.HashMap;
-027import java.util.List;
-028import java.util.Map;
-029import java.util.UUID;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.atomic.AtomicBoolean;
-032import 
java.util.concurrent.atomic.AtomicInteger;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import org.apache.hadoop.hbase.Cell;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import org.apache.hadoop.hbase.Waiter;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.hbase.client.RegionInfo;
-041import 
org.apache.hadoop.hbase.client.Table;
-042import 
org.apache.hadoop.hbase.regionserver.HRegion;
-043import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-048import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-049import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-050import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-053import 
org.apache.hadoop.hbase.util.Threads;
-054import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-055import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-056import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-057import org.junit.AfterClass;
-058import org.junit.Assert;
-059import org.junit.Before;
-060import org.junit.BeforeClass;
-061import org.junit.ClassRule;
-062import org.junit.Test;
-063import 
org.junit.experimental.categories.Category;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066
-067/**
-068 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-069 */
-070@Category({ ReplicationTests.class, 
MediumTests.class })
-071public class TestReplicationEndpoint 
extends TestReplicationBase {
-072
-073  @ClassRule
-074  public static final HBaseClassTestRule 
CLASS_RULE =
-075  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-076
-077  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
+020import static 
org.mockito.Mockito.doNothing;
+021import static org.mockito.Mockito.mock;
+022import static org.mockito.Mockito.spy;
+023import static 
org.mockito.Mockito.verify;
+024import static org.mockito.Mockito.when;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.HashMap;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.UUID;
+032import java.util.concurrent.Callable;
+033import 
java.util.concurrent.atomic.AtomicBoolean;
+034import 
java.util.concurrent.atomic.AtomicInteger;
+035import 
java.util.concurrent.atomic.AtomicReference;
+036import org.apache.hadoop.hbase.Cell;
+037import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.Waiter;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+041import 
org.apache.hadoop.hbase.client.Put;
+042import 
org.apache.hadoop.hbase.client.RegionInfo;
+043import 
org.apache.hadoop.hbase.client.Table;
+044import 
org.apache.hadoop.hbase.regionserver.HRegion;
+045import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
+046import 

[12/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
index 7ebc7bc..e774933 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
@@ -131,7 +131,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class TestReplicationEndpoint.ReplicationEndpointForTest
+public static class TestReplicationEndpoint.ReplicationEndpointForTest
 extends org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
@@ -309,7 +309,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 uuid
-statichttps://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID uuid
+statichttps://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID uuid
 
 
 
@@ -318,7 +318,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 contructedCount
-statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger contructedCount
+statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger contructedCount
 
 
 
@@ -327,7 +327,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 startedCount
-statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger startedCount
+statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger startedCount
 
 
 
@@ -336,7 +336,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 stoppedCount
-statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger stoppedCount
+statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger stoppedCount
 
 
 
@@ -345,7 +345,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 replicateCount
-statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger replicateCount
+statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger replicateCount
 
 
 
@@ -354,7 +354,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 lastEntries
-static volatilehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.wal.WAL.Entry lastEntries
+static volatilehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.wal.WAL.Entry lastEntries
 
 
 
@@ -371,7 +371,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 ReplicationEndpointForTest
-publicReplicationEndpointForTest()
+publicReplicationEndpointForTest()
 
 
 
@@ -388,7 +388,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 getPeerUUID
-publichttps://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUIDgetPeerUUID()
+publichttps://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUIDgetPeerUUID()
 
 
 
@@ -397,7 +397,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 replicate
-publicbooleanreplicate(org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContextreplicateContext)
+publicbooleanreplicate(org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContextreplicateContext)
 
 
 
@@ -406,7 +406,7 @@ extends 
org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
 
 
 start
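
The javadoc fragment above shows the test endpoint tracking its lifecycle with static AtomicInteger counters (contructedCount, startedCount, stoppedCount, replicateCount) and capturing the last replicated batch in a static volatile list. A minimal sketch of that capture-for-assertion idea follows, with WAL entries simplified to plain strings; it is not the real ReplicationEndpointForTest.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

// Placeholder sketch of the test-double bookkeeping shown above: static counters for
// lifecycle events plus a static volatile field capturing the last replicated batch.
public class EndpointForTestSketch {
  static final AtomicInteger startedCount = new AtomicInteger();
  static final AtomicInteger replicateCount = new AtomicInteger();
  static volatile List<String> lastEntries = null;

  void start() {
    startedCount.incrementAndGet();
  }

  boolean replicate(List<String> entries) {
    lastEntries = entries;            // captured so the test can assert on it later
    replicateCount.incrementAndGet();
    return true;                      // pretend the batch shipped successfully
  }

  public static void main(String[] args) {
    EndpointForTestSketch ep = new EndpointForTestSketch();
    ep.start();
    ep.replicate(Arrays.asList("wal-entry-1", "wal-entry-2"));
    System.out.println(replicateCount.get() + " batch(es) replicated; last=" + lastEntries);
  }
}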

[05/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointReturningFalse.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointReturningFalse.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointReturningFalse.html
index ef39f9e..f188fed 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointReturningFalse.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointReturningFalse.html
@@ -25,543 +25,555 @@
 017 */
 018package 
org.apache.hadoop.hbase.replication;
 019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.HashMap;
-027import java.util.List;
-028import java.util.Map;
-029import java.util.UUID;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.atomic.AtomicBoolean;
-032import 
java.util.concurrent.atomic.AtomicInteger;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import org.apache.hadoop.hbase.Cell;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import org.apache.hadoop.hbase.Waiter;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.hbase.client.RegionInfo;
-041import 
org.apache.hadoop.hbase.client.Table;
-042import 
org.apache.hadoop.hbase.regionserver.HRegion;
-043import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-048import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-049import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-050import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-053import 
org.apache.hadoop.hbase.util.Threads;
-054import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-055import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-056import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-057import org.junit.AfterClass;
-058import org.junit.Assert;
-059import org.junit.Before;
-060import org.junit.BeforeClass;
-061import org.junit.ClassRule;
-062import org.junit.Test;
-063import 
org.junit.experimental.categories.Category;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066
-067/**
-068 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-069 */
-070@Category({ ReplicationTests.class, 
MediumTests.class })
-071public class TestReplicationEndpoint 
extends TestReplicationBase {
-072
-073  @ClassRule
-074  public static final HBaseClassTestRule 
CLASS_RULE =
-075  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-076
-077  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
+020import static 
org.mockito.Mockito.doNothing;
+021import static org.mockito.Mockito.mock;
+022import static org.mockito.Mockito.spy;
+023import static 
org.mockito.Mockito.verify;
+024import static org.mockito.Mockito.when;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.HashMap;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.UUID;
+032import java.util.concurrent.Callable;
+033import 
java.util.concurrent.atomic.AtomicBoolean;
+034import 
java.util.concurrent.atomic.AtomicInteger;
+035import 
java.util.concurrent.atomic.AtomicReference;
+036import org.apache.hadoop.hbase.Cell;
+037import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.Waiter;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+041import 
org.apache.hadoop.hbase.client.Put;
+042import 
org.apache.hadoop.hbase.client.RegionInfo;
+043import 
org.apache.hadoop.hbase.client.Table;
+044import 
org.apache.hadoop.hbase.regionserver.HRegion;
+045import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
+046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;

[15/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
index e984063..083ab07 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
@@ -53,1074 +53,1082 @@
 045import 
org.apache.hadoop.conf.Configuration;
 046import org.apache.hadoop.fs.FileSystem;
 047import org.apache.hadoop.fs.Path;
-048import 
org.apache.hadoop.hbase.HConstants;
-049import org.apache.hadoop.hbase.Server;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.TableName;
-052import 
org.apache.hadoop.hbase.replication.ReplicationException;
-053import 
org.apache.hadoop.hbase.replication.ReplicationListener;
-054import 
org.apache.hadoop.hbase.replication.ReplicationPeer;
-055import 
org.apache.hadoop.hbase.replication.ReplicationPeer.PeerState;
-056import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-057import 
org.apache.hadoop.hbase.replication.ReplicationPeerImpl;
-058import 
org.apache.hadoop.hbase.replication.ReplicationPeers;
-059import 
org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
-060import 
org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
-061import 
org.apache.hadoop.hbase.replication.ReplicationTracker;
-062import 
org.apache.hadoop.hbase.replication.ReplicationUtils;
-063import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-064import 
org.apache.hadoop.hbase.util.Pair;
-065import 
org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-066import 
org.apache.hadoop.hbase.wal.SyncReplicationWALProvider;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068import 
org.apache.zookeeper.KeeperException;
-069import org.slf4j.Logger;
-070import org.slf4j.LoggerFactory;
-071
-072import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-073import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-074
-075/**
-076 * This class is responsible to manage 
all the replication sources. There are two classes of
-077 * sources:
-078 * ul
-079 * liNormal sources are 
persistent and one per peer cluster/li
-080 * liOld sources are recovered 
from a failed region server and our only goal is to finish
-081 * replicating the WAL queue it 
had/li
-082 * /ul
-083 * p
-084 * When a region server dies, this class 
uses a watcher to get notified and it tries to grab a lock
-085 * in order to transfer all the queues in 
a local old source.
-086 * p
-087 * Synchronization specification:
-088 * ul
-089 * liNo need synchronized on 
{@link #sources}. {@link #sources} is a ConcurrentHashMap and there
-090 * is a Lock for peer id in {@link 
PeerProcedureHandlerImpl}. So there is no race for peer
-091 * operations./li
-092 * liNeed synchronized on {@link 
#walsById}. There are four methods which modify it,
-093 * {@link #addPeer(String)}, {@link 
#removePeer(String)},
-094 * {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)} and {@link #preLogRoll(Path)}.
-095 * {@link #walsById} is a 
ConcurrentHashMap and there is a Lock for peer id in
-096 * {@link PeerProcedureHandlerImpl}. So 
there is no race between {@link #addPeer(String)} and
-097 * {@link #removePeer(String)}. {@link 
#cleanOldLogs(String, boolean, ReplicationSourceInterface)}
-098 * is called by {@link 
ReplicationSourceInterface}. So no race with {@link #addPeer(String)}.
-099 * {@link #removePeer(String)} will 
terminate the {@link ReplicationSourceInterface} firstly, then
-100 * remove the wals from {@link 
#walsById}. So no race with {@link #removePeer(String)}. The only
-101 * case need synchronized is {@link 
#cleanOldLogs(String, boolean, ReplicationSourceInterface)} and
-102 * {@link 
#preLogRoll(Path)}./li
-103 * liNo need synchronized on 
{@link #walsByIdRecoveredQueues}. There are three methods which
-104 * modify it, {@link #removePeer(String)} 
,
-105 * {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)} and
-106 * {@link 
ReplicationSourceManager.NodeFailoverWorker#run()}.
-107 * {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)} is called by
-108 * {@link ReplicationSourceInterface}. 
{@link #removePeer(String)} will terminate the
-109 * {@link ReplicationSourceInterface} 
firstly, then remove the wals from
-110 * {@link #walsByIdRecoveredQueues}. And 
{@link ReplicationSourceManager.NodeFailoverWorker#run()}

[10/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.html
index 9319626..201cbdc 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.html
@@ -76,230 +76,226 @@
 068
 069  private static final Logger LOG = 
LoggerFactory.getLogger(TestSnapshotFileCache.class);
 070  private static final 
HBaseTestingUtility UTIL = new HBaseTestingUtility();
-071  private static long sequenceId = 0;
-072  private static FileSystem fs;
-073  private static Path rootDir;
-074
-075  @BeforeClass
-076  public static void startCluster() 
throws Exception {
-077UTIL.startMiniDFSCluster(1);
-078fs = 
UTIL.getDFSCluster().getFileSystem();
-079rootDir = 
UTIL.getDefaultRootDirPath();
-080  }
-081
-082  @AfterClass
-083  public static void stopCluster() throws 
Exception {
-084UTIL.shutdownMiniDFSCluster();
-085  }
-086
-087  @After
-088  public void cleanupFiles() throws 
Exception {
-089// cleanup the snapshot directory
-090Path snapshotDir = 
SnapshotDescriptionUtils.getSnapshotsDir(rootDir);
-091fs.delete(snapshotDir, true);
-092  }
-093
-094  @Test
-095  public void testLoadAndDelete() throws 
IOException {
-096// don't refresh the cache unless we 
tell it to
-097long period = Long.MAX_VALUE;
-098SnapshotFileCache cache = new 
SnapshotFileCache(fs, rootDir, period, 1000,
-099
"test-snapshot-file-cache-refresh", new SnapshotFiles());
-100
-101createAndTestSnapshotV1(cache, 
"snapshot1a", false, true);
-102createAndTestSnapshotV1(cache, 
"snapshot1b", true, true);
-103
-104createAndTestSnapshotV2(cache, 
"snapshot2a", false, true);
-105createAndTestSnapshotV2(cache, 
"snapshot2b", true, true);
-106  }
-107
-108  @Test
-109  public void 
testReloadModifiedDirectory() throws IOException {
-110// don't refresh the cache unless we 
tell it to
-111long period = Long.MAX_VALUE;
-112SnapshotFileCache cache = new 
SnapshotFileCache(fs, rootDir, period, 1000,
-113
"test-snapshot-file-cache-refresh", new SnapshotFiles());
-114
-115createAndTestSnapshotV1(cache, 
"snapshot1", false, true);
-116// now delete the snapshot and add a 
file with a different name
-117createAndTestSnapshotV1(cache, 
"snapshot1", false, false);
-118
-119createAndTestSnapshotV2(cache, 
"snapshot2", false, true);
-120// now delete the snapshot and add a 
file with a different name
-121createAndTestSnapshotV2(cache, 
"snapshot2", false, false);
-122  }
-123
-124  @Test
-125  public void testSnapshotTempDirReload() 
throws IOException {
-126long period = Long.MAX_VALUE;
-127// This doesn't refresh cache until 
we invoke it explicitly
-128SnapshotFileCache cache = new 
SnapshotFileCache(fs, rootDir, period, 1000,
-129
"test-snapshot-file-cache-refresh", new SnapshotFiles());
-130
-131// Add a new non-tmp snapshot
-132createAndTestSnapshotV1(cache, 
"snapshot0v1", false, false);
-133createAndTestSnapshotV1(cache, 
"snapshot0v2", false, false);
-134
-135// Add a new tmp snapshot
-136createAndTestSnapshotV2(cache, 
"snapshot1", true, false);
-137
-138// Add another tmp snapshot
-139createAndTestSnapshotV2(cache, 
"snapshot2", true, false);
-140  }
-141
-142  @Test
-143  public void 
testWeNeverCacheTmpDirAndLoadIt() throws Exception {
-144
-145final AtomicInteger count = new 
AtomicInteger(0);
-146// don't refresh the cache unless we 
tell it to
-147long period = Long.MAX_VALUE;
-148SnapshotFileCache cache = new 
SnapshotFileCache(fs, rootDir, period, 1000,
-149
"test-snapshot-file-cache-refresh", new SnapshotFiles()) {
-150  @Override
-151  ListString 
getSnapshotsInProgress(final SnapshotManager snapshotManager)
-152  throws IOException {
-153ListString result = 
super.getSnapshotsInProgress(snapshotManager);
-154count.incrementAndGet();
-155return result;
-156  }
-157
-158  @Override public void 
triggerCacheRefreshForTesting() {
-159
super.triggerCacheRefreshForTesting();
-160  }
-161};
-162
-163SnapshotMock.SnapshotBuilder complete 
=
-164createAndTestSnapshotV1(cache, 
"snapshot", false, false);
-165
-166SnapshotMock.SnapshotBuilder 
inProgress =
-167createAndTestSnapshotV1(cache, 
"snapshotInProgress", true, false);
+071  private static FileSystem fs;
+072  private static Path rootDir;
+073
+074  @BeforeClass
+075  public static void startCluster() 
throws Exception {
+076
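
The removed test body in the diff above counts how often the cache consults getSnapshotsInProgress by overriding the method in an anonymous SnapshotFileCache subclass and bumping an AtomicInteger. A self-contained sketch of that call-counting override pattern follows, using a stand-in base class rather than HBase's SnapshotFileCache.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

// Stand-in for the pattern in the removed test code above: override a method in an
// anonymous subclass, delegate to super, and count invocations with an AtomicInteger.
// CacheBase is a placeholder, not HBase's SnapshotFileCache.
public class CallCountingExample {
  static class CacheBase {
    List<String> getSnapshotsInProgress() {
      return new ArrayList<>(); // placeholder for the real lookup
    }
  }

  public static void main(String[] args) {
    final AtomicInteger count = new AtomicInteger(0);

    CacheBase cache = new CacheBase() {
      @Override
      List<String> getSnapshotsInProgress() {
        List<String> result = super.getSnapshotsInProgress();
        count.incrementAndGet(); // record that the in-progress list was consulted
        return result;
      }
    };

    cache.getSnapshotsInProgress();
    cache.getSnapshotsInProgress();
    System.out.println(count.get()); // prints 2
  }
}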

[11/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.SnapshotFiles.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.SnapshotFiles.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.SnapshotFiles.html
index 9319626..201cbdc 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.SnapshotFiles.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.SnapshotFiles.html
@@ -76,230 +76,226 @@
 068
 069  private static final Logger LOG = 
LoggerFactory.getLogger(TestSnapshotFileCache.class);
 070  private static final 
HBaseTestingUtility UTIL = new HBaseTestingUtility();
-071  private static long sequenceId = 0;
-072  private static FileSystem fs;
-073  private static Path rootDir;
-074
-075  @BeforeClass
-076  public static void startCluster() 
throws Exception {
-077UTIL.startMiniDFSCluster(1);
-078fs = 
UTIL.getDFSCluster().getFileSystem();
-079rootDir = 
UTIL.getDefaultRootDirPath();
-080  }
-081
-082  @AfterClass
-083  public static void stopCluster() throws 
Exception {
-084UTIL.shutdownMiniDFSCluster();
-085  }
-086
-087  @After
-088  public void cleanupFiles() throws 
Exception {
-089// cleanup the snapshot directory
-090Path snapshotDir = 
SnapshotDescriptionUtils.getSnapshotsDir(rootDir);
-091fs.delete(snapshotDir, true);
-092  }
-093
-094  @Test
-095  public void testLoadAndDelete() throws 
IOException {
-096// don't refresh the cache unless we 
tell it to
-097long period = Long.MAX_VALUE;
-098SnapshotFileCache cache = new 
SnapshotFileCache(fs, rootDir, period, 1000,
-099
"test-snapshot-file-cache-refresh", new SnapshotFiles());
-100
-101createAndTestSnapshotV1(cache, 
"snapshot1a", false, true);
-102createAndTestSnapshotV1(cache, 
"snapshot1b", true, true);
-103
-104createAndTestSnapshotV2(cache, 
"snapshot2a", false, true);
-105createAndTestSnapshotV2(cache, 
"snapshot2b", true, true);
-106  }
-107
-108  @Test
-109  public void testReloadModifiedDirectory() throws IOException {
-110    // don't refresh the cache unless we tell it to
-111    long period = Long.MAX_VALUE;
-112    SnapshotFileCache cache = new SnapshotFileCache(fs, rootDir, period, 1000,
-113        "test-snapshot-file-cache-refresh", new SnapshotFiles());
-114
-115    createAndTestSnapshotV1(cache, "snapshot1", false, true);
-116    // now delete the snapshot and add a file with a different name
-117    createAndTestSnapshotV1(cache, "snapshot1", false, false);
-118
-119    createAndTestSnapshotV2(cache, "snapshot2", false, true);
-120    // now delete the snapshot and add a file with a different name
-121    createAndTestSnapshotV2(cache, "snapshot2", false, false);
-122  }
-123
-124  @Test
-125  public void testSnapshotTempDirReload() throws IOException {
-126    long period = Long.MAX_VALUE;
-127    // This doesn't refresh cache until we invoke it explicitly
-128    SnapshotFileCache cache = new SnapshotFileCache(fs, rootDir, period, 1000,
-129        "test-snapshot-file-cache-refresh", new SnapshotFiles());
-130
-131    // Add a new non-tmp snapshot
-132    createAndTestSnapshotV1(cache, "snapshot0v1", false, false);
-133    createAndTestSnapshotV1(cache, "snapshot0v2", false, false);
-134
-135    // Add a new tmp snapshot
-136    createAndTestSnapshotV2(cache, "snapshot1", true, false);
-137
-138    // Add another tmp snapshot
-139    createAndTestSnapshotV2(cache, "snapshot2", true, false);
-140  }
-141
-142  @Test
-143  public void testWeNeverCacheTmpDirAndLoadIt() throws Exception {
-144
-145    final AtomicInteger count = new AtomicInteger(0);
-146    // don't refresh the cache unless we tell it to
-147    long period = Long.MAX_VALUE;
-148    SnapshotFileCache cache = new SnapshotFileCache(fs, rootDir, period, 1000,
-149        "test-snapshot-file-cache-refresh", new SnapshotFiles()) {
-150      @Override
-151      List<String> getSnapshotsInProgress(final SnapshotManager snapshotManager)
-152          throws IOException {
-153        List<String> result = super.getSnapshotsInProgress(snapshotManager);
-154        count.incrementAndGet();
-155        return result;
-156      }
-157
-158      @Override public void triggerCacheRefreshForTesting() {
-159        super.triggerCacheRefreshForTesting();
-160      }
-161    };
-162
-163    SnapshotMock.SnapshotBuilder complete =
-164        createAndTestSnapshotV1(cache, "snapshot", false, false);
-165
-166    SnapshotMock.SnapshotBuilder inProgress =
-167        createAndTestSnapshotV1(cache, "snapshotInProgress", true, false);
+071  private static FileSystem fs;
+072  private static Path rootDir;
+073
+074  @BeforeClass

[02/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.CPMasterObserver.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.CPMasterObserver.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.CPMasterObserver.html
index 16827da..beaa49d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.CPMasterObserver.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.CPMasterObserver.html
@@ -141,7 +141,7 @@
 133  INIT = true;
 134  afterMethod();
 135}
-136
+136observer.resetFlags();
 137  }
 138
 139  @After
@@ -313,315 +313,340 @@
 305    boolean preGetRSGroupInfoOfServerCalled = false;
 306    boolean postGetRSGroupInfoOfServerCalled = false;
 307
-308    @Override
-309    public Optional<MasterObserver> getMasterObserver() {
-310      return Optional.of(this);
-311    }
-312    @Override
-313    public void preMoveServersAndTables(final ObserverContext<MasterCoprocessorEnvironment> ctx,
-314        Set<Address> servers, Set<TableName> tables, String targetGroup) throws IOException {
-315      preMoveServersAndTables = true;
-316    }
-317    @Override
-318    public void postMoveServersAndTables(final ObserverContext<MasterCoprocessorEnvironment> ctx,
-319        Set<Address> servers, Set<TableName> tables, String targetGroup) throws IOException {
-320      postMoveServersAndTables = true;
-321    }
-322    @Override
-323    public void preRemoveServers(
-324        final ObserverContext<MasterCoprocessorEnvironment> ctx,
-325        Set<Address> servers) throws IOException {
-326      preRemoveServersCalled = true;
-327    }
-328    @Override
-329    public void postRemoveServers(
-330        final ObserverContext<MasterCoprocessorEnvironment> ctx,
-331        Set<Address> servers) throws IOException {
-332      postRemoveServersCalled = true;
-333    }
-334    @Override
-335    public void preRemoveRSGroup(final ObserverContext<MasterCoprocessorEnvironment> ctx,
-336        String name) throws IOException {
-337      preRemoveRSGroupCalled = true;
-338    }
-339    @Override
-340    public void postRemoveRSGroup(final ObserverContext<MasterCoprocessorEnvironment> ctx,
-341        String name) throws IOException {
-342      postRemoveRSGroupCalled = true;
-343    }
-344    @Override
-345    public void preAddRSGroup(final ObserverContext<MasterCoprocessorEnvironment> ctx,
-346        String name) throws IOException {
-347      preAddRSGroupCalled = true;
-348    }
-349    @Override
-350    public void postAddRSGroup(final ObserverContext<MasterCoprocessorEnvironment> ctx,
-351        String name) throws IOException {
-352      postAddRSGroupCalled = true;
-353    }
-354    @Override
-355    public void preMoveTables(final ObserverContext<MasterCoprocessorEnvironment> ctx,
-356        Set<TableName> tables, String targetGroup) throws IOException {
-357      preMoveTablesCalled = true;
+308    void resetFlags() {
+309      preBalanceRSGroupCalled = false;
+310      postBalanceRSGroupCalled = false;
+311      preMoveServersCalled = false;
+312      postMoveServersCalled = false;
+313      preMoveTablesCalled = false;
+314      postMoveTablesCalled = false;
+315      preAddRSGroupCalled = false;
+316      postAddRSGroupCalled = false;
+317      preRemoveRSGroupCalled = false;
+318      postRemoveRSGroupCalled = false;
+319      preRemoveServersCalled = false;
+320      postRemoveServersCalled = false;
+321      preMoveServersAndTables = false;
+322      postMoveServersAndTables = false;
+323      preGetRSGroupInfoCalled = false;
+324      postGetRSGroupInfoCalled = false;
+325      preGetRSGroupInfoOfTableCalled = false;
+326      postGetRSGroupInfoOfTableCalled = false;
+327      preListRSGroupsCalled = false;
+328      postListRSGroupsCalled = false;
+329      preGetRSGroupInfoOfServerCalled = false;
+330      postGetRSGroupInfoOfServerCalled = false;
+331    }
+332
+333    @Override
+334    public Optional<MasterObserver> getMasterObserver() {
+335      return Optional.of(this);
+336    }
+337    @Override
+338    public void preMoveServersAndTables(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+339        Set<Address> servers, Set<TableName> tables, String targetGroup) throws IOException {
+340      preMoveServersAndTables = true;
+341    }
+342    @Override
+343    public void postMoveServersAndTables(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+344        Set<Address> servers, Set<TableName> tables, String targetGroup) throws IOException {
+345      postMoveServersAndTables = true;
+346    }
+347    @Override
+348    public void preRemoveServers(
+349        final ObserverContext<MasterCoprocessorEnvironment> ctx,
+350        Set<Address> servers) throws IOException {
+351      preRemoveServersCalled = true;
+352    }

[06/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
index ef39f9e..f188fed 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.ReplicationEndpointForTest.html
@@ -25,543 +25,555 @@
 017 */
 018package 
org.apache.hadoop.hbase.replication;
 019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.HashMap;
-027import java.util.List;
-028import java.util.Map;
-029import java.util.UUID;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.atomic.AtomicBoolean;
-032import 
java.util.concurrent.atomic.AtomicInteger;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import org.apache.hadoop.hbase.Cell;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import org.apache.hadoop.hbase.Waiter;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.hbase.client.RegionInfo;
-041import 
org.apache.hadoop.hbase.client.Table;
-042import 
org.apache.hadoop.hbase.regionserver.HRegion;
-043import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-048import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-049import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-050import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-053import 
org.apache.hadoop.hbase.util.Threads;
-054import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-055import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-056import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-057import org.junit.AfterClass;
-058import org.junit.Assert;
-059import org.junit.Before;
-060import org.junit.BeforeClass;
-061import org.junit.ClassRule;
-062import org.junit.Test;
-063import 
org.junit.experimental.categories.Category;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066
-067/**
-068 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-069 */
-070@Category({ ReplicationTests.class, 
MediumTests.class })
-071public class TestReplicationEndpoint 
extends TestReplicationBase {
-072
-073  @ClassRule
-074  public static final HBaseClassTestRule 
CLASS_RULE =
-075  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-076
-077  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
+020import static 
org.mockito.Mockito.doNothing;
+021import static org.mockito.Mockito.mock;
+022import static org.mockito.Mockito.spy;
+023import static 
org.mockito.Mockito.verify;
+024import static org.mockito.Mockito.when;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.HashMap;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.UUID;
+032import java.util.concurrent.Callable;
+033import 
java.util.concurrent.atomic.AtomicBoolean;
+034import 
java.util.concurrent.atomic.AtomicInteger;
+035import 
java.util.concurrent.atomic.AtomicReference;
+036import org.apache.hadoop.hbase.Cell;
+037import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.Waiter;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+041import 
org.apache.hadoop.hbase.client.Put;
+042import 
org.apache.hadoop.hbase.client.RegionInfo;
+043import 
org.apache.hadoop.hbase.client.Table;
+044import 
org.apache.hadoop.hbase.regionserver.HRegion;
+045import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
+046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
+047import 

[25/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
index 4b549da..7b435d2 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
@@ -127,8 +127,8 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.monitoring.TaskMonitor.TaskFilter.TaskType
 org.apache.hadoop.hbase.monitoring.MonitoredTask.State
+org.apache.hadoop.hbase.monitoring.TaskMonitor.TaskFilter.TaskType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index ea7b4ec..5bc968f 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -437,19 +437,19 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.CompatibilitySingletonFactory.SingletonStorage
-org.apache.hadoop.hbase.KeyValue.Type
+org.apache.hadoop.hbase.MemoryCompactionPolicy
 org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus
-org.apache.hadoop.hbase.CompareOperator
-org.apache.hadoop.hbase.ClusterMetrics.Option
+org.apache.hadoop.hbase.MetaTableAccessor.QueryType
+org.apache.hadoop.hbase.KeepDeletedCells
+org.apache.hadoop.hbase.Cell.Type
 org.apache.hadoop.hbase.Size.Unit
 org.apache.hadoop.hbase.Coprocessor.State
-org.apache.hadoop.hbase.MetaTableAccessor.QueryType
+org.apache.hadoop.hbase.KeyValue.Type
+org.apache.hadoop.hbase.CompatibilitySingletonFactory.SingletonStorage
+org.apache.hadoop.hbase.CompareOperator
 org.apache.hadoop.hbase.CellBuilderType
-org.apache.hadoop.hbase.Cell.Type
 org.apache.hadoop.hbase.HConstants.OperationStatusCode
-org.apache.hadoop.hbase.KeepDeletedCells
-org.apache.hadoop.hbase.MemoryCompactionPolicy
+org.apache.hadoop.hbase.ClusterMetrics.Option
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 12c3557..057a83a 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
+org.apache.hadoop.hbase.procedure2.RootProcedureState.State
 org.apache.hadoop.hbase.procedure2.LockedResourceType
-org.apache.hadoop.hbase.procedure2.Procedure.LockState
 org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
+org.apache.hadoop.hbase.procedure2.Procedure.LockState
 org.apache.hadoop.hbase.procedure2.LockType
-org.apache.hadoop.hbase.procedure2.RootProcedureState.State
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index e0ffe44..4acf265 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ 

[18/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
index 55713d8..de7bd4a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
@@ -126,564 +126,565 @@
 118        .get(ZOOKEEPER_ZNODE_REPLICATION_REGIONS_KEY, ZOOKEEPER_ZNODE_REPLICATION_REGIONS_DEFAULT));
 119  }
 120
-121  private String getRsNode(ServerName serverName) {
-122    return ZNodePaths.joinZNode(queuesZNode, serverName.getServerName());
-123  }
-124
-125  private String getQueueNode(ServerName serverName, String queueId) {
-126    return ZNodePaths.joinZNode(getRsNode(serverName), queueId);
-127  }
-128
-129  private String getFileNode(String queueNode, String fileName) {
-130    return ZNodePaths.joinZNode(queueNode, fileName);
-131  }
-132
-133  private String getFileNode(ServerName serverName, String queueId, String fileName) {
-134    return getFileNode(getQueueNode(serverName, queueId), fileName);
-135  }
-136
-137  /**
-138   * <p>
-139   * Put all regions under /hbase/replication/regions znode will lead to too many children because
-140   * of the huge number of regions in real production environment. So here we will distribute the
-141   * znodes to multiple directories.
-142   * </p>
-143   * <p>
-144   * So the final znode path will be format like this:
-145   *
-146   * <pre>
-147   * /hbase/replication/regions/dd/04/e76a6966d4ffa908ed0586764767-100
-148   * </pre>
-149   *
-150   * Here the full encoded region name is dd04e76a6966d4ffa908ed0586764767, and we use the first two
-151   * characters 'dd' as the first level directory name, and use the next two characters '04' as the
-152   * second level directory name, and the rest part as the prefix of the znode, and the suffix '100'
-153   * is the peer id.
-154   * </p>
-155   * @param encodedRegionName the encoded region name.
-156   * @param peerId peer id for replication.
-157   * @return ZNode path to persist the max sequence id that we've pushed for the given region and
-158   *         peer.
-159   */
-160  @VisibleForTesting
-161  String getSerialReplicationRegionPeerNode(String encodedRegionName, String peerId) {
-162    if (encodedRegionName == null || encodedRegionName.length() != RegionInfo.MD5_HEX_LENGTH) {
-163      throw new IllegalArgumentException(
-164          "Invalid encoded region name: " + encodedRegionName + ", length should be 32.");
-165    }
-166    return new StringBuilder(regionsZNode).append(ZNodePaths.ZNODE_PATH_SEPARATOR)
-167        .append(encodedRegionName.substring(0, 2)).append(ZNodePaths.ZNODE_PATH_SEPARATOR)
-168        .append(encodedRegionName.substring(2, 4)).append(ZNodePaths.ZNODE_PATH_SEPARATOR)
-169        .append(encodedRegionName.substring(4)).append("-").append(peerId).toString();
-170  }
-172  @Override
-173  public void removeQueue(ServerName 
serverName, String queueId) throws ReplicationException {
-174try {
-175  
ZKUtil.deleteNodeRecursively(zookeeper, getQueueNode(serverName, queueId));
-176} catch (KeeperException e) {
-177  throw new ReplicationException(
-178  "Failed to delete queue 
(serverName=" + serverName + ", queueId=" + queueId + ")", e);
-179}
-180  }
-181
-182  @Override
-183  public void addWAL(ServerName 
serverName, String queueId, String fileName)
-184  throws ReplicationException {
-185try {
-186  ZKUtil.createWithParents(zookeeper, 
getFileNode(serverName, queueId, fileName));
-187} catch (KeeperException e) {
-188  throw new 
ReplicationException("Failed to add wal to queue (serverName=" + serverName
-189  + ", queueId=" + queueId + ", 
fileName=" + fileName + ")", e);
-190}
-191  }
-192
-193  @Override
-194  public void removeWAL(ServerName 
serverName, String queueId, String fileName)
-195  throws ReplicationException {
-196String fileNode = 
getFileNode(serverName, queueId, fileName);
-197try {
-198  ZKUtil.deleteNode(zookeeper, 
fileNode);
-199} catch (NoNodeException e) {
-200  LOG.warn("{} already deleted when 
removing log", fileNode);
-201} catch (KeeperException e) {
-202  throw new 
ReplicationException("Failed to remove wal from queue (serverName=" + 
serverName +
-203", queueId=" + queueId + ", 
fileName=" + fileName + ")", e);
-204}
-205  }
-206
-207  private void addLastSeqIdsToOps(String 
queueId, MapString, Long lastSeqIds,
-208  ListZKUtilOp listOfOps) 
throws KeeperException, ReplicationException {
-209String peerId = new 

[19/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/metrics2/lib/class-use/MutableFastCounter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/metrics2/lib/class-use/MutableFastCounter.html 
b/devapidocs/org/apache/hadoop/metrics2/lib/class-use/MutableFastCounter.html
index 9d366f9..196ea9c 100644
--- 
a/devapidocs/org/apache/hadoop/metrics2/lib/class-use/MutableFastCounter.html
+++ 
b/devapidocs/org/apache/hadoop/metrics2/lib/class-use/MutableFastCounter.html
@@ -477,117 +477,121 @@ service.
 
 
 private MutableFastCounter
-MetricsReplicationSinkSourceImpl.hfilesCounter
+MetricsReplicationGlobalSourceSource.failedRecoveryQueue
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.logEditsFilteredCounter
+MetricsReplicationSinkSourceImpl.hfilesCounter
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.logEditsFilteredCounter
+MetricsReplicationSourceSourceImpl.logEditsFilteredCounter
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.logReadInBytesCounter
+MetricsReplicationGlobalSourceSource.logEditsFilteredCounter
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.logReadInBytesCounter
+MetricsReplicationSourceSourceImpl.logReadInBytesCounter
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.logReadInEditsCounter
+MetricsReplicationGlobalSourceSource.logReadInBytesCounter
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.logReadInEditsCounter
+MetricsReplicationSourceSourceImpl.logReadInEditsCounter
 
 
 private MutableFastCounter
-MetricsReplicationSinkSourceImpl.opsCounter
+MetricsReplicationGlobalSourceSource.logReadInEditsCounter
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.repeatedFileBytes
+MetricsReplicationSinkSourceImpl.opsCounter
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.repeatedFileBytes
+MetricsReplicationSourceSourceImpl.repeatedFileBytes
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.restartWALReading
+MetricsReplicationGlobalSourceSource.repeatedFileBytes
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.restartWALReading
+MetricsReplicationSourceSourceImpl.restartWALReading
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.shippedBatchesCounter
+MetricsReplicationGlobalSourceSource.restartWALReading
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.shippedBatchesCounter
+MetricsReplicationSourceSourceImpl.shippedBatchesCounter
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.shippedBytesCounter
+MetricsReplicationGlobalSourceSource.shippedBatchesCounter
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.shippedBytesCounter
+MetricsReplicationSourceSourceImpl.shippedBytesCounter
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.shippedHFilesCounter
+MetricsReplicationGlobalSourceSource.shippedBytesCounter
 
 
 private MutableFastCounter
-MetricsReplicationGlobalSourceSource.shippedHFilesCounter
+MetricsReplicationSourceSourceImpl.shippedHFilesCounter
 
 
 private MutableFastCounter
-MetricsReplicationSourceSourceImpl.shippedKBsCounter
+MetricsReplicationGlobalSourceSource.shippedHFilesCounter
 
 
 private MutableFastCounter
+MetricsReplicationSourceSourceImpl.shippedKBsCounter
+
+
+private MutableFastCounter
 MetricsReplicationGlobalSourceSource.shippedKBsCounter
 Deprecated.
 since 1.3.0. Use MetricsReplicationGlobalSourceSource.shippedBytesCounter
 instead.
 
 
 
-
+
 private MutableFastCounter
 MetricsReplicationSourceSourceImpl.shippedOpsCounter
 
-
+
 private MutableFastCounter
 MetricsReplicationGlobalSourceSource.shippedOpsCounter
 
-
+
 private MutableFastCounter
 MetricsReplicationSourceSourceImpl.uncleanlyClosedSkippedBytes
 
-
+
 private MutableFastCounter
 MetricsReplicationGlobalSourceSource.uncleanlyClosedSkippedBytes
 
-
+
 private MutableFastCounter
 MetricsReplicationSourceSourceImpl.uncleanlyClosedWAL
 
-
+
 private MutableFastCounter
 MetricsReplicationGlobalSourceSource.uncleanlyClosedWAL
 
-
+
 private MutableFastCounter
 MetricsReplicationSourceSourceImpl.unknownFileLengthForClosedWAL
 
-
+
 private MutableFastCounter
 MetricsReplicationGlobalSourceSource.unknownFileLengthForClosedWAL
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index a7b2b34..dbb0b4b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 008@InterfaceAudience.Private
 009public class Version {
 010  public static final String 

[07/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
index ef39f9e..f188fed 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
@@ -25,543 +25,555 @@
 017 */
 018package 
org.apache.hadoop.hbase.replication;
 019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.HashMap;
-027import java.util.List;
-028import java.util.Map;
-029import java.util.UUID;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.atomic.AtomicBoolean;
-032import 
java.util.concurrent.atomic.AtomicInteger;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import org.apache.hadoop.hbase.Cell;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import org.apache.hadoop.hbase.Waiter;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.hbase.client.RegionInfo;
-041import 
org.apache.hadoop.hbase.client.Table;
-042import 
org.apache.hadoop.hbase.regionserver.HRegion;
-043import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-048import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-049import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-050import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-053import 
org.apache.hadoop.hbase.util.Threads;
-054import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-055import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-056import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-057import org.junit.AfterClass;
-058import org.junit.Assert;
-059import org.junit.Before;
-060import org.junit.BeforeClass;
-061import org.junit.ClassRule;
-062import org.junit.Test;
-063import 
org.junit.experimental.categories.Category;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066
-067/**
-068 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-069 */
-070@Category({ ReplicationTests.class, 
MediumTests.class })
-071public class TestReplicationEndpoint 
extends TestReplicationBase {
-072
-073  @ClassRule
-074  public static final HBaseClassTestRule 
CLASS_RULE =
-075  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-076
-077  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
+020import static 
org.mockito.Mockito.doNothing;
+021import static org.mockito.Mockito.mock;
+022import static org.mockito.Mockito.spy;
+023import static 
org.mockito.Mockito.verify;
+024import static org.mockito.Mockito.when;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.HashMap;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.UUID;
+032import java.util.concurrent.Callable;
+033import 
java.util.concurrent.atomic.AtomicBoolean;
+034import 
java.util.concurrent.atomic.AtomicInteger;
+035import 
java.util.concurrent.atomic.AtomicReference;
+036import org.apache.hadoop.hbase.Cell;
+037import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.Waiter;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+041import 
org.apache.hadoop.hbase.client.Put;
+042import 
org.apache.hadoop.hbase.client.RegionInfo;
+043import 
org.apache.hadoop.hbase.client.Table;
+044import 
org.apache.hadoop.hbase.regionserver.HRegion;
+045import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
+046import 

[09/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
index ef39f9e..f188fed 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
@@ -25,543 +25,555 @@
 017 */
 018package 
org.apache.hadoop.hbase.replication;
 019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.HashMap;
-027import java.util.List;
-028import java.util.Map;
-029import java.util.UUID;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.atomic.AtomicBoolean;
-032import 
java.util.concurrent.atomic.AtomicInteger;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import org.apache.hadoop.hbase.Cell;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import org.apache.hadoop.hbase.Waiter;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.hbase.client.RegionInfo;
-041import 
org.apache.hadoop.hbase.client.Table;
-042import 
org.apache.hadoop.hbase.regionserver.HRegion;
-043import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-048import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-049import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-050import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-053import 
org.apache.hadoop.hbase.util.Threads;
-054import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-055import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-056import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-057import org.junit.AfterClass;
-058import org.junit.Assert;
-059import org.junit.Before;
-060import org.junit.BeforeClass;
-061import org.junit.ClassRule;
-062import org.junit.Test;
-063import 
org.junit.experimental.categories.Category;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066
-067/**
-068 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-069 */
-070@Category({ ReplicationTests.class, 
MediumTests.class })
-071public class TestReplicationEndpoint 
extends TestReplicationBase {
-072
-073  @ClassRule
-074  public static final HBaseClassTestRule 
CLASS_RULE =
-075  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-076
-077  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
+020import static 
org.mockito.Mockito.doNothing;
+021import static org.mockito.Mockito.mock;
+022import static org.mockito.Mockito.spy;
+023import static 
org.mockito.Mockito.verify;
+024import static org.mockito.Mockito.when;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.HashMap;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.UUID;
+032import java.util.concurrent.Callable;
+033import 
java.util.concurrent.atomic.AtomicBoolean;
+034import 
java.util.concurrent.atomic.AtomicInteger;
+035import 
java.util.concurrent.atomic.AtomicReference;
+036import org.apache.hadoop.hbase.Cell;
+037import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.Waiter;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+041import 
org.apache.hadoop.hbase.client.Put;
+042import 
org.apache.hadoop.hbase.client.RegionInfo;
+043import 
org.apache.hadoop.hbase.client.Table;
+044import 
org.apache.hadoop.hbase.regionserver.HRegion;
+045import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
+046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
+047import 

[13/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/downloads.html
--
diff --git a/downloads.html b/downloads.html
index 89859a5..dfbe758 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase Downloads
 
@@ -423,7 +423,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-30
+  Last Published: 
2018-10-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/export_control.html
--
diff --git a/export_control.html b/export_control.html
index 08293cb..ee0fece 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Export Control
@@ -331,7 +331,7 @@ for more details.
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-30
+  Last Published: 
2018-10-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/index.html
--
diff --git a/index.html b/index.html
index 6e2e1bb..9fc5e76 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase™ Home
 
@@ -411,7 +411,7 @@ Apache HBase is an open-source, distributed, versioned, 
non-relational database
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-30
+  Last Published: 
2018-10-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/integration.html
--
diff --git a/integration.html b/integration.html
index e5601a3..a01c528 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  CI Management
 
@@ -291,7 +291,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-30
+  Last Published: 
2018-10-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/issue-tracking.html
--
diff --git a/issue-tracking.html b/issue-tracking.html
index 2c5321e..5ad20ae 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Issue Management
 
@@ -288,7 +288,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-30
+  Last Published: 
2018-10-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/license.html
--
diff --git a/license.html b/license.html
index 1ebca5d..17db7f0 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Licenses
 
@@ -491,7 +491,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-30
+  Last Published: 
2018-10-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/mail-lists.html
--
diff --git a/mail-lists.html b/mail-lists.html
index e6c17ce..9036efb 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Mailing Lists
 
@@ -341,7 +341,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-30
+  Last Published: 
2018-10-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/metrics.html
--
diff --git a/metrics.html b/metrics.html
index 68935f9..1f21c24 100644
--- a/metrics.html
+++ b/metrics.html

[21/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.html
index e77672f..edd489e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":42,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":42,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -339,107 +339,111 @@ implements 
 void
+incrFailedRecoveryQueue()
+
+
+void
 incrLogEditsFiltered()
 The number of log edits filtered out.
 
 
-
+
 void
 incrLogEditsFiltered(longdelta)
 Add on the number of log edits filtered
 
 
-
+
 void
 incrLogEditsRead()
 Increment the number of log edits read by one.
 
 
-
+
 private void
 incrLogEditsRead(longdelta)
 Add on the the number of log edits read
 
 
-
+
 void
 incrLogReadInBytes(longreadInBytes)
 increase the byte number read by source from log file
 
 
-
+
 void
 incrRepeatedFileBytes(longbytes)
 
-
+
 void
 incrRestartedWALReading()
 
-
+
 void
 incrSizeOfHFileRefsQueue(longsize)
 
-
+
 void
 incrSizeOfLogQueue()
 Increment size of the log queue.
 
 
-
+
 void
 incrUncleanlyClosedWALs()
 
-
+
 void
 incrUnknownFileLengthForClosedWAL()
 
-
+
 void
 init()
 Clear out the metrics and re-prepare the source.
 
 
-
+
 void
 refreshAgeOfLastShippedOp(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringwalGroupId)
 Convenience method to use the last given timestamp to 
refresh the age of the last edit.
 
 
-
+
 void
 removeMetric(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
 Remove a metric and no longer announce it.
 
 
-
+
 void
 setAgeOfLastShippedOp(longtimestamp,
  https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringwalGroup)
 Set the age of the last edit that was shipped
 
 
-
+
 void
 setAgeOfLastShippedOpByTable(longtimestamp,
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtableName)
 Set the age of the last edit that was shipped group by 
table
 
 
-
+
 void
 setGauge(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgaugeName,
 longvalue)
 Set a gauge to a specific value.
 
 
-
+
 void
 shipBatch(longbatchSize,
  intsizeInBytes)
 Convience method to apply changes to metrics do to shipping 
a batch of logs.
 
 
-
+
 void
 shipBatch(longbatchSize,
  intsizeInBytes,
@@ -447,7 +451,7 @@ implements Convience method to apply changes to metrics do to shipping 
a batch of logs.
 
 
-
+
 void
 updateHistogram(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
longvalue)
@@ -957,13 +961,22 @@ publiclongincrCompletedRecoveryQueue()
 
 
+
+
+
+
+
+incrFailedRecoveryQueue
+publicvoidincrFailedRecoveryQueue()
+
+
 
 
 
 
 
 init
-publicvoidinit()
+publicvoidinit()
 Description copied from 
interface:BaseSource
 Clear out the metrics and re-prepare the source.
 
@@ -978,7 +991,7 @@ publiclong
 
 setGauge
-publicvoidsetGauge(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgaugeName,
+publicvoidsetGauge(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgaugeName,
  longvalue)
 Description copied from 
interface:BaseSource
 Set a gauge to a specific value.
@@ 

[03/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.html
index ef39f9e..f188fed 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.html
@@ -25,543 +25,555 @@
 017 */
 018package 
org.apache.hadoop.hbase.replication;
 019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.HashMap;
-027import java.util.List;
-028import java.util.Map;
-029import java.util.UUID;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.atomic.AtomicBoolean;
-032import 
java.util.concurrent.atomic.AtomicInteger;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import org.apache.hadoop.hbase.Cell;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import org.apache.hadoop.hbase.Waiter;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.hbase.client.RegionInfo;
-041import 
org.apache.hadoop.hbase.client.Table;
-042import 
org.apache.hadoop.hbase.regionserver.HRegion;
-043import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-048import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-049import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-050import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-053import 
org.apache.hadoop.hbase.util.Threads;
-054import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-055import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-056import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-057import org.junit.AfterClass;
-058import org.junit.Assert;
-059import org.junit.Before;
-060import org.junit.BeforeClass;
-061import org.junit.ClassRule;
-062import org.junit.Test;
-063import 
org.junit.experimental.categories.Category;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066
-067/**
-068 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-069 */
-070@Category({ ReplicationTests.class, 
MediumTests.class })
-071public class TestReplicationEndpoint 
extends TestReplicationBase {
-072
-073  @ClassRule
-074  public static final HBaseClassTestRule 
CLASS_RULE =
-075  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-076
-077  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
+020import static 
org.mockito.Mockito.doNothing;
+021import static org.mockito.Mockito.mock;
+022import static org.mockito.Mockito.spy;
+023import static 
org.mockito.Mockito.verify;
+024import static org.mockito.Mockito.when;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.HashMap;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.UUID;
+032import java.util.concurrent.Callable;
+033import 
java.util.concurrent.atomic.AtomicBoolean;
+034import 
java.util.concurrent.atomic.AtomicInteger;
+035import 
java.util.concurrent.atomic.AtomicReference;
+036import org.apache.hadoop.hbase.Cell;
+037import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.Waiter;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+041import 
org.apache.hadoop.hbase.client.Put;
+042import 
org.apache.hadoop.hbase.client.RegionInfo;
+043import 
org.apache.hadoop.hbase.client.Table;
+044import 
org.apache.hadoop.hbase.regionserver.HRegion;
+045import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
+046import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
+047import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
+048import 

[20/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
index cfe9811..576c53f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class ReplicationSourceManager
+public class ReplicationSourceManager
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ReplicationListener
 This class is responsible to manage all the replication 
sources. There are two classes of
@@ -608,7 +608,7 @@ implements 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -617,7 +617,7 @@ implements 
 
 sources
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationSourceInterface
 sources
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationSourceInterface
 sources
 
 
 
@@ -626,7 +626,7 @@ implements 
 
 oldsources
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationSourceInterface
 oldsources
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationSourceInterface
 oldsources
 
 
 
@@ -635,7 +635,7 @@ implements 
 
 queueStorage
-private finalReplicationQueueStorage queueStorage
+private finalReplicationQueueStorage queueStorage
 
 
 
@@ -644,7 +644,7 @@ implements 
 
 replicationTracker
-private finalReplicationTracker replicationTracker
+private finalReplicationTracker replicationTracker
 
 
 
@@ -653,7 +653,7 @@ implements 
 
 replicationPeers
-private finalReplicationPeers replicationPeers
+private finalReplicationPeers replicationPeers
 
 
 
@@ -662,7 +662,7 @@ implements 
 
 clusterId
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID clusterId
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID clusterId
 
 
 
@@ -671,7 +671,7 @@ implements 
 
 server
-private finalServer server
+private finalServer server
 
 
 
@@ -680,7 +680,7 @@ implements 
 
 walsById
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSethttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String walsById
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSethttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 

[16/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
index e984063..083ab07 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
@@ -53,1074 +53,1082 @@
 045import 
org.apache.hadoop.conf.Configuration;
 046import org.apache.hadoop.fs.FileSystem;
 047import org.apache.hadoop.fs.Path;
-048import 
org.apache.hadoop.hbase.HConstants;
-049import org.apache.hadoop.hbase.Server;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.TableName;
-052import 
org.apache.hadoop.hbase.replication.ReplicationException;
-053import 
org.apache.hadoop.hbase.replication.ReplicationListener;
-054import 
org.apache.hadoop.hbase.replication.ReplicationPeer;
-055import 
org.apache.hadoop.hbase.replication.ReplicationPeer.PeerState;
-056import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-057import 
org.apache.hadoop.hbase.replication.ReplicationPeerImpl;
-058import 
org.apache.hadoop.hbase.replication.ReplicationPeers;
-059import 
org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
-060import 
org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
-061import 
org.apache.hadoop.hbase.replication.ReplicationTracker;
-062import 
org.apache.hadoop.hbase.replication.ReplicationUtils;
-063import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-064import 
org.apache.hadoop.hbase.util.Pair;
-065import 
org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-066import 
org.apache.hadoop.hbase.wal.SyncReplicationWALProvider;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068import 
org.apache.zookeeper.KeeperException;
-069import org.slf4j.Logger;
-070import org.slf4j.LoggerFactory;
-071
-072import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-073import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-074
-075/**
-076 * This class is responsible to manage all the replication sources. There are two classes of
-077 * sources:
-078 * <ul>
-079 * <li>Normal sources are persistent and one per peer cluster</li>
-080 * <li>Old sources are recovered from a failed region server and our only goal is to finish
-081 * replicating the WAL queue it had</li>
-082 * </ul>
-083 * <p>
-084 * When a region server dies, this class uses a watcher to get notified and it tries to grab a lock
-085 * in order to transfer all the queues in a local old source.
-086 * <p>
-087 * Synchronization specification:
-088 * <ul>
-089 * <li>No need synchronized on {@link #sources}. {@link #sources} is a ConcurrentHashMap and there
-090 * is a Lock for peer id in {@link PeerProcedureHandlerImpl}. So there is no race for peer
-091 * operations.</li>
-092 * <li>Need synchronized on {@link #walsById}. There are four methods which modify it,
-093 * {@link #addPeer(String)}, {@link #removePeer(String)},
-094 * {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)} and {@link #preLogRoll(Path)}.
-095 * {@link #walsById} is a ConcurrentHashMap and there is a Lock for peer id in
-096 * {@link PeerProcedureHandlerImpl}. So there is no race between {@link #addPeer(String)} and
-097 * {@link #removePeer(String)}. {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)}
-098 * is called by {@link ReplicationSourceInterface}. So no race with {@link #addPeer(String)}.
-099 * {@link #removePeer(String)} will terminate the {@link ReplicationSourceInterface} firstly, then
-100 * remove the wals from {@link #walsById}. So no race with {@link #removePeer(String)}. The only
-101 * case need synchronized is {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)} and
-102 * {@link #preLogRoll(Path)}.</li>
-103 * <li>No need synchronized on {@link #walsByIdRecoveredQueues}. There are three methods which
-104 * modify it, {@link #removePeer(String)} ,
-105 * {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)} and
-106 * {@link ReplicationSourceManager.NodeFailoverWorker#run()}.
-107 * {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)} is called by
-108 * {@link ReplicationSourceInterface}. {@link #removePeer(String)} will terminate the
-109 * {@link ReplicationSourceInterface} firstly, then remove the wals from
-110 * {@link #walsByIdRecoveredQueues}. And {@link ReplicationSourceManager.NodeFailoverWorker#run()}
-111 * will add the wals to {@link
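
The class comment above describes walsById as a per-peer map of WAL groups to sorted WAL names, where extra synchronization is only required because cleanOldLogs and preLogRoll can interleave. The following is a minimal, self-contained sketch of that data shape and locking idea; it is not the ReplicationSourceManager implementation, and the method names and the coarse synchronized locking are illustrative assumptions.

import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Sketch of the walsById shape described in the javadoc above:
// peer id -> (WAL group/prefix -> sorted set of WAL names for that group).
public class WalsByIdSketch {
  private final ConcurrentMap<String, Map<String, NavigableSet<String>>> walsById =
      new ConcurrentHashMap<>();

  // Roughly the preLogRoll side: record a newly rolled WAL for a peer.
  // synchronized here stands in for the finer-grained locking discussed above,
  // since a concurrent cleanup may touch the same nested sets.
  public synchronized void recordNewWal(String peerId, String walGroup, String walName) {
    walsById.computeIfAbsent(peerId, p -> new ConcurrentHashMap<>())
        .computeIfAbsent(walGroup, g -> new TreeSet<>())
        .add(walName);
  }

  // Roughly the cleanOldLogs side: drop WALs strictly older than the given one.
  public synchronized void pruneOlderThan(String peerId, String walGroup, String walName) {
    Map<String, NavigableSet<String>> groups = walsById.get(peerId);
    if (groups == null) {
      return;
    }
    NavigableSet<String> wals = groups.get(walGroup);
    if (wals != null) {
      wals.headSet(walName).clear(); // headSet is exclusive of walName itself
    }
  }
}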

[23/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
index a284487..1015127 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":9,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":9,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -148,46 +148,50 @@ implements completedWAL
 
 
+private MutableFastCounter
+failedRecoveryQueue
+
+
 private static String
 KEY_PREFIX
 
-
+
 private MutableFastCounter
 logEditsFilteredCounter
 
-
+
 private MutableFastCounter
 logReadInBytesCounter
 
-
+
 private MutableFastCounter
 logReadInEditsCounter
 
-
+
 private MutableFastCounter
 repeatedFileBytes
 
-
+
 private MutableFastCounter
 restartWALReading
 
-
+
 private MetricsReplicationSourceImpl
 rms
 
-
+
 private MutableFastCounter
 shippedBatchesCounter
 
-
+
 private MutableFastCounter
 shippedBytesCounter
 
-
+
 private MutableFastCounter
 shippedHFilesCounter
 
-
+
 private MutableFastCounter
 shippedKBsCounter
 Deprecated.
@@ -195,27 +199,27 @@ implements 
+
 private MutableFastCounter
 shippedOpsCounter
 
-
+
 private 
org.apache.hadoop.metrics2.lib.MutableGaugeLong
 sizeOfHFileRefsQueueGauge
 
-
+
 private 
org.apache.hadoop.metrics2.lib.MutableGaugeLong
 sizeOfLogQueueGauge
 
-
+
 private MutableFastCounter
 uncleanlyClosedSkippedBytes
 
-
+
 private MutableFastCounter
 uncleanlyClosedWAL
 
-
+
 private MutableFastCounter
 unknownFileLengthForClosedWAL
 
@@ -225,7 +229,7 @@ implements MetricsReplicationSourceSource
-SOURCE_AGE_OF_LAST_SHIPPED_OP,
 SOURCE_CLOSED_LOGS_WITH_UNKNOWN_LENGTH,
 SOURCE_COMPLETED_LOGS,
 SOURCE_COMPLETED_RECOVERY_QUEUES,
 SOURCE_LOG_EDITS_FILTERED,
 SOURCE_LOG_READ_IN_BYTES, SOURCE_LOG_READ_IN_EDITS,
 SOURCE_REPEATED_LOG_FILE_BYTES,
 SOURCE_RESTARTED_LOG_READING,
 SOURCE_SHIPPED_BATCHES,
 SOURCE_SHIPPED_BYTES,
 SOURCE_SHIPPED_HFILES, 
SOURCE_SHIPPED_KBS,
 SOURCE_SHIPPED_OPS,
 SOURCE_SIZE_OF_HFILE_REFS_QUEUE,
 SOURCE_SIZE_OF_LOG_QUEUE,
 SOURCE_UNCLEANLY_CLOSED_IGNORED_IN_BYTES,
 SOURCE_UNCLEANLY_CLOSED_LOGS
+SOURCE_AGE_OF_LAST_SHIPPED_OP,
 SOURCE_CLOSED_LOGS_WITH_UNKNOWN_LENGTH,
 SOURCE_COMPLETED_LOGS,
 SOURCE_COMPLETED_RECOVERY_QUEUES,
 SOURCE_FAILED_RECOVERY_QUEUES,
 SOURCE_LOG_EDITS_FILTERED, 
SOURCE_LOG_READ_IN_BYTES,
 SOURCE_LOG_READ_IN_EDITS,
 SOURCE_REPEATED_LOG_FILE_BYTES,
 SOURCE_RESTARTED_LOG_READING,
 SOURCE_SHIPPED_BATCHES,
 SOURCE_SHIPPED_BYTES,
 SOURCE_SHIPPED_HFILES,
 SOURCE_SHIPPED_KBS,
 SOURCE_SHIPPED_OPS,
 SOURCE_SIZE_OF_HFILE_REFS_QUEUE,
 SOURCE_SIZE_OF_LOG_QUEUE,
 SOURCE_UNCLEANLY_CLOSED_IGNORED_IN_BYTES,
 SOURCE_UNCLEANLY_CLOSED_LOGS
 
 
 
@@ -353,76 +357,80 @@ implements 
 void
-incrHFilesShipped(long hfiles)
+incrFailedRecoveryQueue()
 
 
 void
-incrLogEditsFiltered(long size)
+incrHFilesShipped(long hfiles)
 
 
 void
-incrLogReadInBytes(long size)
+incrLogEditsFiltered(long size)
 
 
 void
-incrLogReadInEdits(long size)
+incrLogReadInBytes(long size)
 
 
 void
-incrOpsShipped(long ops)
+incrLogReadInEdits(long size)
 
 
 void
-incrRepeatedFileBytes(long bytes)
+incrOpsShipped(long ops)
 
 
 void
-incrRestartedWALReading()
+incrRepeatedFileBytes(long bytes)
 
 
 void
-incrShippedBytes(long size)
+incrRestartedWALReading()
 
 
 void
-incrSizeOfHFileRefsQueue(long size)
+incrShippedBytes(long size)
 
 
 void
-incrSizeOfLogQueue(int size)
+incrSizeOfHFileRefsQueue(long size)
 
 
 void
-incrUncleanlyClosedWALs()
+incrSizeOfLogQueue(int size)
 
 
 void
-incrUnknownFileLengthForClosedWAL()
+incrUncleanlyClosedWALs()
 
 
 void
+incrUnknownFileLengthForClosedWAL()
+
+
+void
 init()
 Clear out the metrics and re-prepare the source.
 
 
-
+
 void
 

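
The diff above adds a failedRecoveryQueue counter, a SOURCE_FAILED_RECOVERY_QUEUES constant, and an incrFailedRecoveryQueue() method alongside the existing source metrics. Below is a minimal sketch of how such a counter slots into the register-then-increment pattern that the source diff later in this thread shows for the other counters; the constant's string value and the stand-in Counter/Registry types are assumptions, not the HBase code.

// Stand-ins so the sketch compiles without the Hadoop metrics2 classes.
interface Counter { void incr(); }
interface Registry { Counter getCounter(String name, long initialValue); }

public class FailedRecoveryQueueMetricSketch {
  // Assumed key; the real constant lives in MetricsReplicationSourceSource.
  static final String SOURCE_FAILED_RECOVERY_QUEUES = "source.failedRecoverQueues";

  private final Counter failedRecoveryQueue;

  public FailedRecoveryQueueMetricSketch(Registry registry) {
    // Same shape as the other registrations in the diff: getCounter(SOURCE_..., 0L).
    this.failedRecoveryQueue = registry.getCounter(SOURCE_FAILED_RECOVERY_QUEUES, 0L);
  }

  // Mirrors the new incrFailedRecoveryQueue() method in the diff above,
  // presumably incremented when claiming a recovered replication queue fails.
  public void incrFailedRecoveryQueue() {
    failedRecoveryQueue.incr();
  }
}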
[26/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/37b8a04a
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/37b8a04a
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/37b8a04a

Branch: refs/heads/asf-site
Commit: 37b8a04a71b405280bc7d315858cc1ad7f12f22e
Parents: d949795
Author: jenkins 
Authored: Tue Oct 2 14:53:34 2018 +
Committer: jenkins 
Committed: Tue Oct 2 14:53:34 2018 +

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|4 +-
 book.html   |2 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   |   30 +-
 coc.html|4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/constant-values.html |   39 +-
 devapidocs/index-all.html   |   16 +
 .../hadoop/hbase/backup/package-tree.html   |4 +-
 .../hadoop/hbase/class-use/ServerName.html  |   34 +-
 .../hadoop/hbase/client/package-tree.html   |   28 +-
 .../hadoop/hbase/coprocessor/package-tree.html  |2 +-
 .../hadoop/hbase/executor/package-tree.html |2 +-
 .../hadoop/hbase/filter/package-tree.html   |   10 +-
 .../hadoop/hbase/io/hfile/package-tree.html |6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |2 +-
 .../hadoop/hbase/mapreduce/package-tree.html|4 +-
 .../hadoop/hbase/master/package-tree.html   |6 +-
 .../hbase/master/procedure/package-tree.html|2 +-
 .../hadoop/hbase/monitoring/package-tree.html   |2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |   16 +-
 .../hadoop/hbase/procedure2/package-tree.html   |4 +-
 .../hadoop/hbase/quotas/package-tree.html   |   10 +-
 .../hadoop/hbase/regionserver/package-tree.html |   18 +-
 .../regionserver/querymatcher/package-tree.html |2 +-
 .../hbase/regionserver/wal/package-tree.html|4 +-
 .../replication/ReplicationQueueStorage.html|   46 +-
 .../replication/ZKReplicationQueueStorage.html  |   92 +-
 .../MetricsReplicationGlobalSourceSource.html   |  170 +-
 .../MetricsReplicationSourceSource.html |  130 +-
 .../MetricsReplicationSourceSourceImpl.html |   75 +-
 .../replication/regionserver/MetricsSource.html |   77 +-
 ...icationSourceManager.NodeFailoverWorker.html |   12 +-
 ...SourceManager.ReplicationQueueOperation.html |4 +-
 .../regionserver/ReplicationSourceManager.html  |  132 +-
 .../hadoop/hbase/rest/model/package-tree.html   |2 +-
 .../hbase/security/access/package-tree.html |4 +-
 .../hadoop/hbase/security/package-tree.html |2 +-
 .../apache/hadoop/hbase/util/package-tree.html  |   10 +-
 .../lib/class-use/MutableFastCounter.html   |   58 +-
 .../org/apache/hadoop/hbase/Version.html|6 +-
 .../replication/ReplicationQueueStorage.html|9 +-
 .../replication/ZKReplicationQueueStorage.html  | 1117 +-
 .../MetricsReplicationGlobalSourceSource.html   |  408 ++--
 .../MetricsReplicationSourceSource.html |   50 +-
 .../MetricsReplicationSourceSourceImpl.html |  111 +-
 .../replication/regionserver/MetricsSource.html |  138 +-
 ...icationSourceManager.NodeFailoverWorker.html | 2088 +-
 ...SourceManager.ReplicationQueueOperation.html | 2088 +-
 .../regionserver/ReplicationSourceManager.html  | 2088 +-
 downloads.html  |4 +-
 export_control.html |4 +-
 index.html  |4 +-
 integration.html|4 +-
 issue-tracking.html |4 +-
 license.html|4 +-
 mail-lists.html |4 +-
 metrics.html|4 +-
 old_news.html   |4 +-
 plugin-management.html  |4 +-
 plugins.html|4 +-
 poweredbyhbase.html |4 +-
 project-info.html   |4 +-
 project-reports.html|4 +-
 project-summary.html|4 +-
 pseudo-distributed.html |4 +-
 replication.html|4 +-
 resources.html  |4 +-
 source-repository.html  |4 +-
 

[17/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
index 13d09d1..6c7169e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
@@ -60,209 +60,215 @@
 052  private final MutableFastCounter 
repeatedFileBytes;
 053  private final MutableFastCounter 
completedWAL;
 054  private final MutableFastCounter 
completedRecoveryQueue;
-055
-056  public 
MetricsReplicationGlobalSourceSource(MetricsReplicationSourceImpl rms) {
-057this.rms = rms;
-058
-059ageOfLastShippedOpHist = 
rms.getMetricsRegistry().getHistogram(SOURCE_AGE_OF_LAST_SHIPPED_OP);
-060
-061sizeOfLogQueueGauge = 
rms.getMetricsRegistry().getGauge(SOURCE_SIZE_OF_LOG_QUEUE, 0L);
-062
-063shippedBatchesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_BATCHES, 0L);
-064
-065shippedOpsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_OPS, 0L);
-066
-067shippedKBsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_KBS, 0L);
-068
-069shippedBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_BYTES, 0L);
-070
-071logReadInBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_BYTES, 0L);
-072
-073logReadInEditsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_EDITS, 0L);
-074
-075logEditsFilteredCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_EDITS_FILTERED, 0L);
-076
-077shippedHFilesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_HFILES, 0L);
-078
-079sizeOfHFileRefsQueueGauge =
-080
rms.getMetricsRegistry().getGauge(SOURCE_SIZE_OF_HFILE_REFS_QUEUE, 0L);
-081
-082unknownFileLengthForClosedWAL = 
rms.getMetricsRegistry()
-083
.getCounter(SOURCE_CLOSED_LOGS_WITH_UNKNOWN_LENGTH, 0L);
-084uncleanlyClosedWAL = 
rms.getMetricsRegistry().getCounter(SOURCE_UNCLEANLY_CLOSED_LOGS, 0L);
-085uncleanlyClosedSkippedBytes = 
rms.getMetricsRegistry()
-086
.getCounter(SOURCE_UNCLEANLY_CLOSED_IGNORED_IN_BYTES, 0L);
-087restartWALReading = 
rms.getMetricsRegistry().getCounter(SOURCE_RESTARTED_LOG_READING, 0L);
-088repeatedFileBytes = 
rms.getMetricsRegistry().getCounter(SOURCE_REPEATED_LOG_FILE_BYTES, 0L);
-089completedWAL = 
rms.getMetricsRegistry().getCounter(SOURCE_COMPLETED_LOGS, 0L);
-090completedRecoveryQueue = 
rms.getMetricsRegistry()
-091
.getCounter(SOURCE_COMPLETED_RECOVERY_QUEUES, 0L);
-092  }
-093
-094  @Override public void 
setLastShippedAge(long age) {
-095ageOfLastShippedOpHist.add(age);
-096  }
-097
-098  @Override public void 
incrSizeOfLogQueue(int size) {
-099sizeOfLogQueueGauge.incr(size);
-100  }
-101
-102  @Override public void 
decrSizeOfLogQueue(int size) {
-103sizeOfLogQueueGauge.decr(size);
-104  }
-105
-106  @Override public void 
incrLogReadInEdits(long size) {
-107logReadInEditsCounter.incr(size);
-108  }
-109
-110  @Override public void 
incrLogEditsFiltered(long size) {
-111logEditsFilteredCounter.incr(size);
-112  }
-113
-114  @Override public void 
incrBatchesShipped(int batches) {
-115
shippedBatchesCounter.incr(batches);
-116  }
-117
-118  @Override public void 
incrOpsShipped(long ops) {
-119shippedOpsCounter.incr(ops);
-120  }
-121
-122  @Override public void 
incrShippedBytes(long size) {
-123shippedBytesCounter.incr(size);
-124// obtained value maybe smaller than 
1024. We should make sure that KB count
-125// eventually picks up even from 
multiple smaller updates.
-126
incrementKBsCounter(shippedBytesCounter, shippedKBsCounter);
-127  }
-128
-129  static void 
incrementKBsCounter(MutableFastCounter bytesCounter, MutableFastCounter 
kbsCounter) {
-130// Following code should be 
thread-safe.
-131long delta = 0;
-132while(true) {
-133  long bytes = 
bytesCounter.value();
-134  delta = (bytes / 1024) - 
kbsCounter.value();
-135  if (delta > 0) {
-136kbsCounter.incr(delta);
-137  } else {
-138break;
-139  }
-140}
-141  }
-142
-143  @Override public void 
incrLogReadInBytes(long size) {
-144logReadInBytesCounter.incr(size);
-145  }
-146
-147  @Override public void clear() {
+055  private final MutableFastCounter 
failedRecoveryQueue;
+056
+057  public 
MetricsReplicationGlobalSourceSource(MetricsReplicationSourceImpl rms) {
+058this.rms = rms;
+059
+060ageOfLastShippedOpHist = 

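
The incrementKBsCounter helper shown in the diff above keeps the derived KB counter in step with the byte counter: it repeatedly reads the byte total, computes bytes/1024 minus the current KB value, and increments by that delta until the delta is no longer positive, so updates smaller than 1024 bytes still surface once enough of them accumulate. A self-contained sketch of that catch-up loop, with plain AtomicLongs standing in for MutableFastCounter:

import java.util.concurrent.atomic.AtomicLong;

public class KbCounterCatchUpSketch {
  // Stand-ins for the shippedBytesCounter / shippedKBsCounter pair above.
  static final AtomicLong shippedBytes = new AtomicLong();
  static final AtomicLong shippedKBs = new AtomicLong();

  static void incrShippedBytes(long size) {
    shippedBytes.addAndGet(size);
    // Even when 'size' is under 1024, the loop lets the KB counter
    // catch up once enough small updates have accumulated.
    incrementKBsCounter(shippedBytes, shippedKBs);
  }

  static void incrementKBsCounter(AtomicLong bytesCounter, AtomicLong kbsCounter) {
    while (true) {
      long bytes = bytesCounter.get();
      long delta = (bytes / 1024) - kbsCounter.get();
      if (delta > 0) {
        kbsCounter.addAndGet(delta);
      } else {
        break;
      }
    }
  }

  public static void main(String[] args) {
    for (int i = 0; i < 5; i++) {
      incrShippedBytes(300); // five 300-byte batches -> 1500 bytes -> 1 KB
    }
    System.out.println(shippedBytes.get() + " bytes, " + shippedKBs.get() + " KB");
  }
}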
[01/26] hbase-site git commit: Published site at 42aa3dd463c0d30a9b940d296b87316b5c67e1f5.

2018-10-02 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site d94979517 -> 37b8a04a7


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37b8a04a/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.html
index 16827da..beaa49d 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/rsgroup/TestRSGroups.html
@@ -141,7 +141,7 @@
 133  INIT = true;
 134  afterMethod();
 135}
-136
+136observer.resetFlags();
 137  }
 138
 139  @After
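
The hunk above adds an observer.resetFlags() call to the per-test setup, and the hunk below introduces the resetFlags() helper on the test coprocessor observer, whose hooks simply record that they ran by flipping boolean flags. A minimal sketch of that record-and-reset pattern follows; the names are illustrative and this is not the TestRSGroups code itself.

// Each hook records its invocation in a flag; setup clears all flags so one
// test's hook invocations cannot satisfy another test's assertions.
public class FlagRecordingObserverSketch {
  boolean preAddGroupCalled = false;
  boolean postAddGroupCalled = false;

  void resetFlags() {
    preAddGroupCalled = false;
    postAddGroupCalled = false;
  }

  void preAddGroup(String name) { preAddGroupCalled = true; }
  void postAddGroup(String name) { postAddGroupCalled = true; }

  public static void main(String[] args) {
    FlagRecordingObserverSketch observer = new FlagRecordingObserverSketch();

    // "Test 1" exercises the hooks...
    observer.preAddGroup("g1");
    observer.postAddGroup("g1");

    // ..."Test 2" resets in its setup before making any assertions.
    observer.resetFlags();
    System.out.println(observer.preAddGroupCalled); // false: no stale state
  }
}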
@@ -313,315 +313,340 @@
 305boolean 
preGetRSGroupInfoOfServerCalled = false;
 306boolean 
postGetRSGroupInfoOfServerCalled = false;
 307
-308@Override
-309public Optional<MasterObserver> 
getMasterObserver() {
-310  return Optional.of(this);
-311}
-312@Override
-313public void 
preMoveServersAndTables(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-314Set<Address> servers, 
Set<TableName> tables, String targetGroup) throws IOException {
-315  preMoveServersAndTables = true;
-316}
-317@Override
-318public void 
postMoveServersAndTables(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-319Set<Address> servers, 
Set<TableName> tables, String targetGroup) throws IOException {
-320  postMoveServersAndTables = true;
-321}
-322@Override
-323public void preRemoveServers(
-324final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-325Set<Address> servers) 
throws IOException {
-326  preRemoveServersCalled = true;
-327}
-328@Override
-329public void postRemoveServers(
-330final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-331Set<Address> servers) 
throws IOException {
-332  postRemoveServersCalled = true;
-333}
-334@Override
-335public void preRemoveRSGroup(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-336String name) throws IOException 
{
-337  preRemoveRSGroupCalled = true;
-338}
-339@Override
-340public void postRemoveRSGroup(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-341String name) throws IOException 
{
-342  postRemoveRSGroupCalled = true;
-343}
-344@Override
-345public void preAddRSGroup(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-346String name) throws IOException 
{
-347  preAddRSGroupCalled = true;
-348}
-349@Override
-350public void postAddRSGroup(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-351String name) throws IOException 
{
-352  postAddRSGroupCalled = true;
-353}
-354@Override
-355public void preMoveTables(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
-356Set<TableName> tables, 
String targetGroup) throws IOException {
-357  preMoveTablesCalled = true;
+308void resetFlags() {
+309  preBalanceRSGroupCalled = false;
+310  postBalanceRSGroupCalled = false;
+311  preMoveServersCalled = false;
+312  postMoveServersCalled = false;
+313  preMoveTablesCalled = false;
+314  postMoveTablesCalled = false;
+315  preAddRSGroupCalled = false;
+316  postAddRSGroupCalled = false;
+317  preRemoveRSGroupCalled = false;
+318  postRemoveRSGroupCalled = false;
+319  preRemoveServersCalled = false;
+320  postRemoveServersCalled = false;
+321  preMoveServersAndTables = false;
+322  postMoveServersAndTables = false;
+323  preGetRSGroupInfoCalled = false;
+324  postGetRSGroupInfoCalled = false;
+325  preGetRSGroupInfoOfTableCalled = 
false;
+326  postGetRSGroupInfoOfTableCalled = 
false;
+327  preListRSGroupsCalled = false;
+328  postListRSGroupsCalled = false;
+329  preGetRSGroupInfoOfServerCalled = 
false;
+330  postGetRSGroupInfoOfServerCalled = 
false;
+331}
+332
+333@Override
+334public Optional<MasterObserver> 
getMasterObserver() {
+335  return Optional.of(this);
+336}
+337@Override
+338public void 
preMoveServersAndTables(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
+339Set<Address> servers, 
Set<TableName> tables, String targetGroup) throws IOException {
+340  preMoveServersAndTables = true;
+341}
+342@Override
+343public void 
postMoveServersAndTables(final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
+344Set<Address> servers, 
Set<TableName> tables, String targetGroup) throws IOException {
+345  postMoveServersAndTables = true;
+346}
+347@Override
+348public void preRemoveServers(
+349final 
ObserverContext<MasterCoprocessorEnvironment> ctx,
+350Set<Address> servers) 
throws IOException {
+351  preRemoveServersCalled = true;
+352}