hbase git commit: HBASE-21217 Revisit the executeProcedure method for open/close region

2018-09-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master c686b535c -> 8eaaa6311


HBASE-21217 Revisit the executeProcedure method for open/close region


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8eaaa631
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8eaaa631
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8eaaa631

Branch: refs/heads/master
Commit: 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1
Parents: c686b53
Author: zhangduo 
Authored: Mon Sep 24 21:49:27 2018 +0800
Committer: zhangduo 
Committed: Tue Sep 25 17:30:21 2018 +0800

--
 .../hadoop/hbase/executor/ExecutorService.java  |  25 ++-
 .../org/apache/hadoop/hbase/master/HMaster.java |  15 +-
 .../hbase/regionserver/HRegionServer.java   |  16 +-
 .../hbase/regionserver/RSRpcServices.java   |  81 +++--
 .../regionserver/RegionServerServices.java  |  19 +--
 .../handler/AssignRegionHandler.java| 166 +++
 .../handler/CloseRegionHandler.java |   5 +
 .../regionserver/handler/OpenRegionHandler.java |   9 +-
 .../handler/UnassignRegionHandler.java  | 139 
 .../hadoop/hbase/MockRegionServerServices.java  |  14 +-
 .../hadoop/hbase/master/MockRegionServer.java   |  14 +-
 .../hbase/master/TestCloseAnOpeningRegion.java  | 146 
 12 files changed, 588 insertions(+), 61 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8eaaa631/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
index 342d441..4f8909e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
@@ -27,19 +27,22 @@ import java.util.Map.Entry;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
-
+import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
 
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningScheduledExecutorService;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
 import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
@@ -64,12 +67,15 @@ public class ExecutorService {
   // Name of the server hosting this executor service.
   private final String servername;
 
+  private final ListeningScheduledExecutorService delayedSubmitTimer =
+    MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder()
+      .setDaemon(true).setNameFormat("Event-Executor-Delay-Submit-Timer").build()));
+
   /**
    * Default constructor.
    * @param servername Name of the hosting server.
   */
   public ExecutorService(final String servername) {
-    super();
     this.servername = servername;
   }
 
@@ -99,6 +105,7 @@ public class ExecutorService {
   }
 
   public void shutdown() {
+    this.delayedSubmitTimer.shutdownNow();
     for (Entry<String, Executor> entry : this.executorMap.entrySet()) {
       List<Runnable> wasRunning =
         entry.getValue().threadPoolExecutor.shutdownNow();
@@ -146,6 +153,18 @@ public class ExecutorService {
     }
   }
 
+  // Submit the handler after the given delay. Used for retrying.
+  public void delayedSubmit(EventHandler eh, long delay, TimeUnit unit) {
+    ListenableFuture<?> future = delayedSubmitTimer.schedule(() -> submit(eh), delay, unit);
+    future.addListener(() -> {
+      try {
+        future.get();
+      } catch (Exception e) {
+        LOG.error("Failed to submit the event handler {} to executor", eh, e);
+      }
+    }, MoreExecutors.directExecutor());
+  }
+
   public Map<String, ExecutorStatus> getAllExecutorStatuses() {
     Map<String, ExecutorStatus> ret = Maps.newHashMap();
     for (Map.Entry<String, Executor> e : executorMap.entrySet()) {

http://git-wip-us.apache.org/repos/as
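The new delayedSubmit hook above is what the reworked open/close handlers can lean on when they cannot make progress yet: instead of blocking an executor thread, a handler hands itself back to the timer and retries later. A minimal sketch of that usage follows; the handler class, backoff constant, canProceed() check, and the null Server passed to the super constructor are all invented for illustration and are not part of this patch (only ExecutorService.delayedSubmit is).

import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hbase.executor.EventHandler;
import org.apache.hadoop.hbase.executor.EventType;
import org.apache.hadoop.hbase.executor.ExecutorService;

// Sketch only: an event handler that re-queues itself with a fixed backoff when
// it cannot proceed yet, rather than spinning or sleeping on the executor thread.
// Everything here except ExecutorService.delayedSubmit() is hypothetical.
class DelayedRetryHandlerSketch extends EventHandler {
  private static final long BACKOFF_MS = 100;
  private final ExecutorService executor;

  DelayedRetryHandlerSketch(ExecutorService executor) {
    super(null, EventType.M_RS_OPEN_REGION); // null Server keeps the sketch small
    this.executor = executor;
  }

  @Override
  public void process() {
    if (!canProceed()) {
      // Hand ourselves back to the delayed-submit timer added in this commit and
      // return immediately; the timer re-submits this handler after BACKOFF_MS.
      executor.delayedSubmit(this, BACKOFF_MS, TimeUnit.MILLISECONDS);
      return;
    }
    // ... the real open/close work would run here ...
  }

  private boolean canProceed() {
    return true; // placeholder condition for the sketch
  }
}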

hbase git commit: HBASE-21217 Revisit the executeProcedure method for open/close region

2018-09-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 df5310fc1 -> fa2888ebf


HBASE-21217 Revisit the executeProcedure method for open/close region


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fa2888eb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fa2888eb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fa2888eb

Branch: refs/heads/branch-2
Commit: fa2888ebf0c815485a14e4768fd9dccac3669841
Parents: df5310f
Author: zhangduo 
Authored: Mon Sep 24 21:49:27 2018 +0800
Committer: zhangduo 
Committed: Tue Sep 25 18:10:47 2018 +0800

--
 .../hadoop/hbase/executor/ExecutorService.java  |  25 ++-
 .../org/apache/hadoop/hbase/master/HMaster.java |  15 +-
 .../hbase/regionserver/HRegionServer.java   |  16 +-
 .../hbase/regionserver/RSRpcServices.java   |  81 +++--
 .../regionserver/RegionServerServices.java  |  19 +--
 .../handler/AssignRegionHandler.java| 166 +++
 .../handler/CloseRegionHandler.java |   5 +
 .../regionserver/handler/OpenRegionHandler.java |   9 +-
 .../handler/UnassignRegionHandler.java  | 139 
 .../hadoop/hbase/MockRegionServerServices.java  |  14 +-
 .../hadoop/hbase/master/MockRegionServer.java   |  14 +-
 .../hbase/master/TestCloseAnOpeningRegion.java  | 146 
 12 files changed, 588 insertions(+), 61 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fa2888eb/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
index 342d441..4f8909e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
@@ -27,19 +27,22 @@ import java.util.Map.Entry;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
-
+import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
 
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningScheduledExecutorService;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
 import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
@@ -64,12 +67,15 @@ public class ExecutorService {
   // Name of the server hosting this executor service.
   private final String servername;
 
+  private final ListeningScheduledExecutorService delayedSubmitTimer =
+    MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder()
+      .setDaemon(true).setNameFormat("Event-Executor-Delay-Submit-Timer").build()));
+
   /**
    * Default constructor.
    * @param servername Name of the hosting server.
   */
   public ExecutorService(final String servername) {
-    super();
     this.servername = servername;
   }
 
@@ -99,6 +105,7 @@ public class ExecutorService {
   }
 
   public void shutdown() {
+    this.delayedSubmitTimer.shutdownNow();
     for (Entry<String, Executor> entry : this.executorMap.entrySet()) {
       List<Runnable> wasRunning =
         entry.getValue().threadPoolExecutor.shutdownNow();
@@ -146,6 +153,18 @@ public class ExecutorService {
     }
   }
 
+  // Submit the handler after the given delay. Used for retrying.
+  public void delayedSubmit(EventHandler eh, long delay, TimeUnit unit) {
+    ListenableFuture<?> future = delayedSubmitTimer.schedule(() -> submit(eh), delay, unit);
+    future.addListener(() -> {
+      try {
+        future.get();
+      } catch (Exception e) {
+        LOG.error("Failed to submit the event handler {} to executor", eh, e);
+      }
+    }, MoreExecutors.directExecutor());
+  }
+
   public Map<String, ExecutorStatus> getAllExecutorStatuses() {
     Map<String, ExecutorStatus> ret = Maps.newHashMap();
     for (Map.Entry<String, Executor> e : executorMap.entrySet()) {

http://git-wip-us.apache.org/repo
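This is the same change as above, applied to branch-2. One detail worth calling out in delayedSubmit is the addListener/directExecutor pairing: it exists only to surface an exception thrown by the scheduled submit, which would otherwise be swallowed by the timer thread. A standalone sketch of that pattern is below, written against plain Guava rather than the hbase-thirdparty relocation used in the patch, so it compiles on its own.

import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

// Sketch of the schedule-then-listen pattern used by delayedSubmit(), shown with
// plain Guava instead of the shaded classes so it is self-contained.
public class ScheduleThenListenSketch {
  public static void main(String[] args) throws InterruptedException {
    ListeningScheduledExecutorService timer =
        MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1));
    ListenableFuture<?> future =
        timer.schedule(() -> System.out.println("delayed task ran"), 100, TimeUnit.MILLISECONDS);
    // The listener runs on whichever thread completes the future (directExecutor)
    // and only calls get() so a failure in the scheduled task is logged instead
    // of disappearing silently.
    future.addListener(() -> {
      try {
        future.get();
      } catch (Exception e) {
        System.err.println("delayed task failed: " + e);
      }
    }, MoreExecutors.directExecutor());
    Thread.sleep(200); // give the delayed task time to run before shutting down
    timer.shutdownNow();
  }
}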

[03/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
index 4a11f27..7c7966d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
@@ -49,287 +49,290 @@
 041 * Handles opening of a region on a region server.
 042 *
 043 * This is executed after receiving an OPEN RPC from the master or client.
[remaining regenerated source-HTML listing for OpenRegionHandler.java truncated in the archive]


[04/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
index 4a11f27..7c7966d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
@@ -49,287 +49,290 @@
 041 * Handles opening of a region on a region server.
 042 *
 043 * This is executed after receiving an OPEN RPC from the master or client.
[remaining regenerated source-HTML listing for OpenRegionHandler.java truncated in the archive]


[16/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
index 8cc5add..34858d6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
@@ -2188,1428 +2188,1428 @@
[regenerated source-HTML listing for HRegionServer.java (postOpenDeployTasks / reportRegionStateTransition region); diff body truncated in the archive]

[06/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
index acc491f..e6c6561 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
@@ -26,256 +26,255 @@
[regenerated source-HTML listing for RegionServerServices.java; diff body truncated in the archive]

[15/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 8cc5add..34858d6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -2188,1428 +2188,1428 @@
[regenerated source-HTML listing for HRegionServer.java (postOpenDeployTasks / reportRegionStateTransition region); diff body truncated in the archive]

[11/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
index 25f458d..20e3eaa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
@@ -28,3711 +28,3756 @@
[regenerated source-HTML listing for RSRpcServices.java; diff body truncated in the archive]

[12/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
index 25f458d..20e3eaa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
@@ -28,3711 +28,3756 @@
[regenerated source-HTML listing for RSRpcServices.java; diff body truncated in the archive]

[09/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
index 25f458d..20e3eaa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
@@ -28,3711 +28,3756 @@
[regenerated source-HTML listing for RSRpcServices.java; diff body truncated in the archive]

[08/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index 25f458d..20e3eaa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -28,3711 +28,3756 @@
[regenerated source-HTML listing for RSRpcServices.java; diff body truncated in the archive]

[51/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/f6f9d4f3
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/f6f9d4f3
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/f6f9d4f3

Branch: refs/heads/asf-site
Commit: f6f9d4f3ec90654a8acba14f6dc92677f03424d4
Parents: e4b87e9
Author: jenkins 
Authored: Tue Sep 25 14:54:27 2018 +
Committer: jenkins 
Committed: Tue Sep 25 14:54:27 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 .../hbase/util/Bytes.ByteArrayComparator.html   | 8 +-
 .../hbase/util/Bytes.RowEndKeyComparator.html   | 8 +-
 apidocs/org/apache/hadoop/hbase/util/Bytes.html |   278 +-
 .../hbase/util/Bytes.ByteArrayComparator.html   |  4965 ++---
 .../hbase/util/Bytes.RowEndKeyComparator.html   |  4965 ++---
 .../org/apache/hadoop/hbase/util/Bytes.html |  4965 ++---
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 18986 -
 checkstyle.rss  |54 +-
 coc.html| 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 2 +
 devapidocs/allclasses-noframe.html  | 2 +
 devapidocs/constant-values.html | 6 +-
 devapidocs/deprecated-list.html |25 +-
 devapidocs/index-all.html   |   163 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../apache/hadoop/hbase/class-use/Server.html   | 8 +-
 .../hadoop/hbase/class-use/ServerName.html  |29 +-
 .../hbase/class-use/TableDescriptors.html   | 8 +-
 .../hadoop/hbase/class-use/TableName.html   |14 +
 .../hbase/client/class-use/RegionInfo.html  |50 +-
 .../hbase/client/class-use/TableDescriptor.html |59 +-
 .../hadoop/hbase/client/package-tree.html   |22 +-
 .../hadoop/hbase/executor/EventHandler.html | 2 +-
 .../executor/ExecutorService.Executor.html  |24 +-
 .../ExecutorService.ExecutorStatus.html |12 +-
 .../ExecutorService.RunningEventStatus.html | 8 +-
 ...cutorService.TrackingThreadPoolExecutor.html |12 +-
 .../hadoop/hbase/executor/ExecutorService.html  |82 +-
 .../hbase/executor/class-use/EventHandler.html  |38 +-
 .../hbase/executor/class-use/EventType.html |20 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../master/HMaster.InitializationMonitor.html   |20 +-
 .../master/HMaster.MasterStoppedException.html  | 4 +-
 .../hbase/master/HMaster.RedirectServlet.html   |12 +-
 .../master/HMaster.TableDescriptorGetter.html   | 4 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |   690 +-
 .../master/HMasterCommandLine.LocalHMaster.html | 6 +-
 .../assignment/class-use/AssignmentManager.html | 6 +-
 .../hbase/master/class-use/MasterServices.html  | 8 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../hbase/master/procedure/package-tree.html| 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |16 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 8 +-
 .../HRegionServer.MovedRegionInfo.html  |16 +-
 .../HRegionServer.MovedRegionsCleaner.html  |16 +-
 .../hbase/regionserver/HRegionServer.html   |   190 +-
 .../regionserver/RSRpcServices.LogDelegate.html | 4 +-
 ...SRpcServices.RegionScannerCloseCallBack.html | 8 +-
 .../RSRpcServices.RegionScannerHolder.html  |24 +-
 ...pcServices.RegionScannerShippedCallBack.html |12 +-
 ...RpcServices.RegionScannersCloseCallBack.html |10 +-
 .../RSRpcServices.ScannerListener.html  | 8 +-
 .../hbase/regionserver/RSRpcServices.html   |   427 +-
 ...ionServerServices.PostOpenDeployContext.html |12 +-
 ...erServices.RegionStateTransitionContext.html |20 +-
 .../regionserver/RegionServerServices.html  |   105 +-
 .../hbase/regionserver/class-use/HRegion.html   |19 +-
 ...ionServerServices.PostOpenDeployContext.html | 3 +-
 .../class-use/RegionServerServices.html |75 +-
 .../handler/AssignRegionHandler.html|   471 +
 .../regionserver/handler/CloseMetaHandler.html  | 4 +-
 .../handler/CloseRegionHandler.

[22/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
index 0cf012a..976894f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
@@ -63,3884 +63,3883 @@
 055import javax.servlet.http.HttpServlet;
 056import 
javax.servlet.http.HttpServletRequest;
 057import 
javax.servlet.http.HttpServletResponse;
-058
-059import 
org.apache.commons.lang3.StringUtils;
-060import 
org.apache.hadoop.conf.Configuration;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.ChoreService;
-063import 
org.apache.hadoop.hbase.ClusterId;
-064import 
org.apache.hadoop.hbase.ClusterMetrics;
-065import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-066import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-067import 
org.apache.hadoop.hbase.CompoundConfiguration;
-068import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-069import 
org.apache.hadoop.hbase.HBaseIOException;
-070import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-071import 
org.apache.hadoop.hbase.HConstants;
-072import 
org.apache.hadoop.hbase.InvalidFamilyOperationException;
-073import 
org.apache.hadoop.hbase.MasterNotRunningException;
-074import 
org.apache.hadoop.hbase.MetaTableAccessor;
-075import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-076import 
org.apache.hadoop.hbase.PleaseHoldException;
-077import 
org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-078import 
org.apache.hadoop.hbase.ServerName;
-079import 
org.apache.hadoop.hbase.TableDescriptors;
-080import 
org.apache.hadoop.hbase.TableName;
-081import 
org.apache.hadoop.hbase.TableNotDisabledException;
-082import 
org.apache.hadoop.hbase.TableNotFoundException;
-083import 
org.apache.hadoop.hbase.UnknownRegionException;
-084import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-085import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-086import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-087import 
org.apache.hadoop.hbase.client.RegionInfo;
-088import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-089import 
org.apache.hadoop.hbase.client.Result;
-090import 
org.apache.hadoop.hbase.client.TableDescriptor;
-091import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-092import 
org.apache.hadoop.hbase.client.TableState;
-093import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-094import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-095import 
org.apache.hadoop.hbase.exceptions.MergeRegionException;
-096import 
org.apache.hadoop.hbase.executor.ExecutorType;
-097import 
org.apache.hadoop.hbase.favored.FavoredNodesManager;
-098import 
org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-099import 
org.apache.hadoop.hbase.http.InfoServer;
-100import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-101import 
org.apache.hadoop.hbase.ipc.RpcServer;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-104import 
org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
-105import 
org.apache.hadoop.hbase.master.assignment.AssignProcedure;
-106import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-107import 
org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
-108import 
org.apache.hadoop.hbase.master.assignment.MoveRegionProcedure;
-109import 
org.apache.hadoop.hbase.master.assignment.RegionStateNode;
-110import 
org.apache.hadoop.hbase.master.assignment.RegionStates;
-111import 
org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
-112import 
org.apache.hadoop.hbase.master.assignment.UnassignProcedure;
-113import 
org.apache.hadoop.hbase.master.balancer.BalancerChore;
-114import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
-115import 
org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
-116import 
org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
-117import 
org.apache.hadoop.hbase.master.cleaner.CleanerChore;
-118import 
org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-119import 
org.apache.hadoop.hbase.master.cleaner.LogCleaner;
-120import 
org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
-121import 
org.apache.hadoop.hbase.master.locking.LockManager;
-122import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
-123import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-124import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
-125import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizerChore;
-126

[24/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index 0cf012a..976894f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
@@ -63,3884 +63,3883 @@
 055import javax.servlet.http.HttpServlet;
 056import 
javax.servlet.http.HttpServletRequest;
 057import 
javax.servlet.http.HttpServletResponse;
-058
-059import 
org.apache.commons.lang3.StringUtils;
-060import 
org.apache.hadoop.conf.Configuration;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.ChoreService;
-063import 
org.apache.hadoop.hbase.ClusterId;
-064import 
org.apache.hadoop.hbase.ClusterMetrics;
-065import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-066import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-067import 
org.apache.hadoop.hbase.CompoundConfiguration;
-068import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-069import 
org.apache.hadoop.hbase.HBaseIOException;
-070import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-071import 
org.apache.hadoop.hbase.HConstants;
-072import 
org.apache.hadoop.hbase.InvalidFamilyOperationException;
-073import 
org.apache.hadoop.hbase.MasterNotRunningException;
-074import 
org.apache.hadoop.hbase.MetaTableAccessor;
-075import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-076import 
org.apache.hadoop.hbase.PleaseHoldException;
-077import 
org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-078import 
org.apache.hadoop.hbase.ServerName;
-079import 
org.apache.hadoop.hbase.TableDescriptors;
-080import 
org.apache.hadoop.hbase.TableName;
-081import 
org.apache.hadoop.hbase.TableNotDisabledException;
-082import 
org.apache.hadoop.hbase.TableNotFoundException;
-083import 
org.apache.hadoop.hbase.UnknownRegionException;
-084import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-085import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-086import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-087import 
org.apache.hadoop.hbase.client.RegionInfo;
-088import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-089import 
org.apache.hadoop.hbase.client.Result;
-090import 
org.apache.hadoop.hbase.client.TableDescriptor;
-091import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-092import 
org.apache.hadoop.hbase.client.TableState;
-093import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-094import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-095import 
org.apache.hadoop.hbase.exceptions.MergeRegionException;
-096import 
org.apache.hadoop.hbase.executor.ExecutorType;
-097import 
org.apache.hadoop.hbase.favored.FavoredNodesManager;
-098import 
org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-099import 
org.apache.hadoop.hbase.http.InfoServer;
-100import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-101import 
org.apache.hadoop.hbase.ipc.RpcServer;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-104import 
org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
-105import 
org.apache.hadoop.hbase.master.assignment.AssignProcedure;
-106import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-107import 
org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
-108import 
org.apache.hadoop.hbase.master.assignment.MoveRegionProcedure;
-109import 
org.apache.hadoop.hbase.master.assignment.RegionStateNode;
-110import 
org.apache.hadoop.hbase.master.assignment.RegionStates;
-111import 
org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
-112import 
org.apache.hadoop.hbase.master.assignment.UnassignProcedure;
-113import 
org.apache.hadoop.hbase.master.balancer.BalancerChore;
-114import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
-115import 
org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
-116import 
org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
-117import 
org.apache.hadoop.hbase.master.cleaner.CleanerChore;
-118import 
org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-119import 
org.apache.hadoop.hbase.master.cleaner.LogCleaner;
-120import 
org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
-121import 
org.apache.hadoop.hbase.master.locking.LockManager;
-122import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
-123import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-124import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
-125import 
org.apache.hadoop.hbase.master.normaliz

[38/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index ef2df60..bc216d6 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,13 +229,13 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
-org.apache.hadoop.hbase.quotas.QuotaScope
 org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.quotas.RpcThrottlingException.Type
+org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.QuotaScope
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index ee14089..19d70bf 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.MovedRegionInfo
+private static class HRegionServer.MovedRegionInfo
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 
 
@@ -218,7 +218,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 serverName
-private final ServerName serverName
+private final ServerName serverName
 
 
 
@@ -227,7 +227,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 seqNum
-private final long seqNum
+private final long seqNum
 
 
 
@@ -236,7 +236,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 ts
-private final long ts
+private final long ts
 
 
 
@@ -253,7 +253,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 MovedRegionInfo
-public MovedRegionInfo(ServerName serverName,
+public MovedRegionInfo(ServerName serverName,
long closeSeqNum)
 
 
@@ -271,7 +271,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getServerName
-public ServerName getServerName()
+public ServerName getServerName()
 
 
 
@@ -280,7 +280,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getSeqNum
-public long getSeqNum()
+public long getSeqNum()
 
 
 
@@ -289,7 +289,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getMoveTime
-public long getMoveTime()
+public long getMoveTime()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 81ffcf7..3d963f4 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static final class HRegionServer.MovedRegionsCleaner
+protected static final class HRegionServer.MovedRegionsCleaner
 extends ScheduledChore
 implements Stoppable
 Creates a Chore thread to clean the moved region 
cache.
@@ -242,7 +242,7 @@ implements 
 
 regionServer
-private HRegionServer regionServer
+private HRegionServer regionServer
 
 
 
@@ -251,7 +251,7 @@ implements 
 
 stoppable
-Stoppable stoppable
+Stoppable stoppable
 
 
 
@@ -268,7 +268,7 @@ implements 
 
 MovedRegionsCleaner
-private MovedRegionsCleaner(HRegionServer regionSe

[23/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
index 0cf012a..976894f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
@@ -63,3884 +63,3883 @@
 055import javax.servlet.http.HttpServlet;
 056import 
javax.servlet.http.HttpServletRequest;
 057import 
javax.servlet.http.HttpServletResponse;
-058
-059import 
org.apache.commons.lang3.StringUtils;
-060import 
org.apache.hadoop.conf.Configuration;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.ChoreService;
-063import 
org.apache.hadoop.hbase.ClusterId;
-064import 
org.apache.hadoop.hbase.ClusterMetrics;
-065import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-066import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-067import 
org.apache.hadoop.hbase.CompoundConfiguration;
-068import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-069import 
org.apache.hadoop.hbase.HBaseIOException;
-070import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-071import 
org.apache.hadoop.hbase.HConstants;
-072import 
org.apache.hadoop.hbase.InvalidFamilyOperationException;
-073import 
org.apache.hadoop.hbase.MasterNotRunningException;
-074import 
org.apache.hadoop.hbase.MetaTableAccessor;
-075import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-076import 
org.apache.hadoop.hbase.PleaseHoldException;
-077import 
org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-078import 
org.apache.hadoop.hbase.ServerName;
-079import 
org.apache.hadoop.hbase.TableDescriptors;
-080import 
org.apache.hadoop.hbase.TableName;
-081import 
org.apache.hadoop.hbase.TableNotDisabledException;
-082import 
org.apache.hadoop.hbase.TableNotFoundException;
-083import 
org.apache.hadoop.hbase.UnknownRegionException;
-084import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-085import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-086import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-087import 
org.apache.hadoop.hbase.client.RegionInfo;
-088import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-089import 
org.apache.hadoop.hbase.client.Result;
-090import 
org.apache.hadoop.hbase.client.TableDescriptor;
-091import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-092import 
org.apache.hadoop.hbase.client.TableState;
-093import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-094import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-095import 
org.apache.hadoop.hbase.exceptions.MergeRegionException;
-096import 
org.apache.hadoop.hbase.executor.ExecutorType;
-097import 
org.apache.hadoop.hbase.favored.FavoredNodesManager;
-098import 
org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-099import 
org.apache.hadoop.hbase.http.InfoServer;
-100import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-101import 
org.apache.hadoop.hbase.ipc.RpcServer;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-104import 
org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
-105import 
org.apache.hadoop.hbase.master.assignment.AssignProcedure;
-106import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-107import 
org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
-108import 
org.apache.hadoop.hbase.master.assignment.MoveRegionProcedure;
-109import 
org.apache.hadoop.hbase.master.assignment.RegionStateNode;
-110import 
org.apache.hadoop.hbase.master.assignment.RegionStates;
-111import 
org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
-112import 
org.apache.hadoop.hbase.master.assignment.UnassignProcedure;
-113import 
org.apache.hadoop.hbase.master.balancer.BalancerChore;
-114import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
-115import 
org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
-116import 
org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
-117import 
org.apache.hadoop.hbase.master.cleaner.CleanerChore;
-118import 
org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-119import 
org.apache.hadoop.hbase.master.cleaner.LogCleaner;
-120import 
org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
-121import 
org.apache.hadoop.hbase.master.locking.LockManager;
-122import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
-123import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-124import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
-125import 
org.apache.hadoop.hbase.master.nor

[26/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.TrackingThreadPoolExecutor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.TrackingThreadPoolExecutor.html b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.TrackingThreadPoolExecutor.html
index 49f081b..33c9cc0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.TrackingThreadPoolExecutor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.TrackingThreadPoolExecutor.html
@@ -35,309 +35,328 @@
 027import 
java.util.concurrent.BlockingQueue;
 028import 
java.util.concurrent.ConcurrentHashMap;
 029import 
java.util.concurrent.ConcurrentMap;
-030import 
java.util.concurrent.LinkedBlockingQueue;
-031import 
java.util.concurrent.ThreadPoolExecutor;
-032import java.util.concurrent.TimeUnit;
-033import 
java.util.concurrent.atomic.AtomicLong;
-034
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036import org.slf4j.Logger;
-037import org.slf4j.LoggerFactory;
-038import 
org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
+030import java.util.concurrent.Executors;
+031import 
java.util.concurrent.LinkedBlockingQueue;
+032import 
java.util.concurrent.ThreadPoolExecutor;
+033import java.util.concurrent.TimeUnit;
+034import 
java.util.concurrent.atomic.AtomicLong;
+035import 
org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
+036import 
org.apache.yetus.audience.InterfaceAudience;
+037import org.slf4j.Logger;
+038import org.slf4j.LoggerFactory;
 039
 040import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 041import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 042import 
org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-043import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-044
-045/**
-046 * This is a generic executor service. 
This component abstracts a
-047 * threadpool, a queue to which {@link 
EventType}s can be submitted,
-048 * and a 
Runnable that handles the object that is added to the 
queue.
-049 *
-050 * In order to create a new service, create an instance of this class and
-051 * then do: instance.startExecutorService("myService");. When done
-052 * call {@link #shutdown()}.
-053 *
-054 * In order to use the service created above, call
-055 * {@link #submit(EventHandler)}.
-056 */
-057@InterfaceAudience.Private
-058public class ExecutorService {
-059  private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class);
-060
-061  // hold the all the executors created in a map addressable by their names
-062  private final ConcurrentHashMap executorMap = new ConcurrentHashMap<>();
+043import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
+044import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningScheduledExecutorService;
+045import org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
+046import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+047
+048/**
+049 * This is a generic executor service. This component abstracts a
+050 * threadpool, a queue to which {@link EventType}s can be submitted,
+051 * and a Runnable that handles the object that is added to the queue.
+052 *
+053 * In order to create a new service, create an instance of this class and
+054 * then do: instance.startExecutorService("myService");. When done
+055 * call {@link #shutdown()}.
+056 *
+057 * In order to use the service created above, call
+058 * {@link #submit(EventHandler)}.
+059 */
+060@InterfaceAudience.Private
+061public class ExecutorService {
+062  private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class);
 063
-064  // Name of the server hosting this executor service.
-065  private final String servername;
+064  // hold the all the executors created in a map addressable by their names
+065  private final ConcurrentHashMap executorMap = new ConcurrentHashMap<>();
 066
-067  /**
-068   * Default constructor.
-069   * @param servername Name of the hosting server.
-070   */
-071  public ExecutorService(final String servername) {
-072    super();
-073    this.servername = servername;
-074  }
-075
-076  /**
-077   * Start an executor service with a given name. If there was a service already
-078   * started with the same name, this throws a RuntimeException.
-079   * @param name Name of the service to start.
-080   */
-081  @VisibleForTesting
-082  public void startExecutorService(String name, int maxThreads) {
-083    if (this.executorMap.get(name) != null) {
-084      throw new RuntimeException("An executor servi

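The ExecutorService javadoc in the hunk above spells out the intended lifecycle: construct the service, start an executor, submit EventHandlers, and call shutdown() when done. Below is a minimal sketch of that lifecycle assembled only from the signatures visible in this commit; the server name, pool size, event type, and handler body are hypothetical, and in practice HMaster/HRegionServer wire this up internally rather than user code.

import java.io.IOException;
import org.apache.hadoop.hbase.executor.EventHandler;
import org.apache.hadoop.hbase.executor.EventType;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.executor.ExecutorType;

public class ExecutorServiceSketch {
  public static void main(String[] args) {
    // Construct the service; the name is normally the hosting server's name.
    ExecutorService svc = new ExecutorService("example-regionserver");

    // Start one executor with a small pool (the thread count here is arbitrary).
    svc.startExecutorService(ExecutorType.RS_OPEN_REGION, 3);

    // submit() dispatches on the handler's EventType, so the event type chosen
    // here (M_RS_OPEN_REGION) is assumed to map to the executor started above.
    // A real handler would carry region state; null stands in for the Server.
    svc.submit(new EventHandler(null, EventType.M_RS_OPEN_REGION) {
      @Override
      public void process() throws IOException {
        // region-opening work would go here
      }
    });

    // Stop all executors once the hosting server is shutting down.
    svc.shutdown();
  }
}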

[35/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index e41f49c..23b1976 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":9,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":9,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":9,"i52":9,"i53":9,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":9,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":9,"i55":9,"i56":9,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RSRpcServices
+public class RSRpcServices
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements HBaseRPCErrorHandler, 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface,
 PriorityFunction, ConfigurationObserver
 Implements the regionserver RPC services.
@@ -620,15 +620,28 @@ implements 
+private void
+executeCloseRegionProcedures(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest request) 
+
+
+private void
+executeOpenRegionProcedures(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest request,
+   https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map tdCache) 
+
+
+private void
+executeProcedures(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RemoteProcedureRequest request) 
+
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse
 executeProcedures(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest request) 
 
-
+
 static boolean
 exitIfOOME(https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable e) 
 
-
+
 private void
 failRegionAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.Builder responseBuilder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder regionActionResultBuilder,
@@ -636,53 +649,53 @@ implements CellScanner cellScanner,
 https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in 
java.lang">Throwable error) 
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse
 flushRegion(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,

org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest request)

[30/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/Bytes.html b/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
index 9dd868b..9c965b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
@@ -281,7 +281,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 
-private static boolean
+(package private) static boolean
 UNSAFE_UNALIGNED 
 
 
@@ -1462,7 +1462,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 UNSAFE_UNALIGNED
-private static final boolean UNSAFE_UNALIGNED
+static final boolean UNSAFE_UNALIGNED
 
 
 
@@ -1471,7 +1471,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 bytes
-private byte[] bytes
+private byte[] bytes
 
 
 
@@ -1480,7 +1480,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 offset
-private int offset
+private int offset
 
 
 
@@ -1489,7 +1489,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 length
-private int length
+private int length
 
 
 
@@ -1498,7 +1498,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 BYTES_COMPARATOR
-public static final https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator BYTES_COMPARATOR
+public static final https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator BYTES_COMPARATOR
 Pass this to TreeMaps where byte [] are keys.
 
 
@@ -1508,7 +1508,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 BYTES_RAWCOMPARATOR
-public static final org.apache.hadoop.io.RawComparator 
BYTES_RAWCOMPARATOR
+public static final org.apache.hadoop.io.RawComparator 
BYTES_RAWCOMPARATOR
 Use comparing byte arrays, byte-by-byte
 
 
@@ -1518,7 +1518,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 HEX_CHARS_UPPER
-private static final char[] HEX_CHARS_UPPER
+private static final char[] HEX_CHARS_UPPER
 
 
 
@@ -1527,7 +1527,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 RNG
-private static final https://docs.oracle.com/javase/8/docs/api/java/security/SecureRandom.html?is-external=true";
 title="class or interface in java.security">SecureRandom RNG
+private static final https://docs.oracle.com/javase/8/docs/api/java/security/SecureRandom.html?is-external=true";
 title="class or interface in java.security">SecureRandom RNG
 
 
 
@@ -1536,7 +1536,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 HEX_CHARS
-private static final char[] HEX_CHARS
+private static final char[] HEX_CHARS
 
 
 
@@ -1553,7 +1553,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-public Bytes()
+public Bytes()
 Create a zero-size sequence.
 
 
@@ -1563,7 +1563,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-public Bytes(byte[] bytes)
+public Bytes(byte[] bytes)
 Create a Bytes using the byte array as the initial 
value.
 
 Parameters:
@@ -1577,7 +1577,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-public Bytes(Bytes ibw)
+public Bytes(Bytes ibw)
 Set the new Bytes to the contents of the passed
  ibw.
 
@@ -1592,7 +1592,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-public Bytes(byte[] bytes,
+public Bytes(byte[] bytes,
  int offset,
  int length)
 Set the value to a given byte range
@@ -1611,7 +1611,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 Bytes
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
-public Bytes(com.google.protobuf.ByteString byteString)
+public Bytes(com.google.protobuf.ByteString byteString)
 Deprecated. As of release 2.0.0, this will be removed in HBase 
3.0.0.
 Copy bytes from ByteString instance.
 
@@ -1634,7 +1634,7 @@ public 
 
 len
-public static final int len(byte[] b)
+public static final int len(byte[] b)
 Returns length of the byte array, returning 0 if the array 
is null.
  Useful for calculating sizes.
 
@@ -1651,7 +1651,7 @@ public 
 
 get
-public byte[] get()
+public byte[] get()
 Get the data from the Bytes.
 
 Returns:
@@ -1665,7 +1665,7 @@ public 
 
 set
-public void set(byte[] b)
+public void set(byte[] b)
 
 Parameters:
 b - Use passed bytes as backing array for this instance.
@@ -1678,7 +1678,7 @@ public 
 
 set
-public void set(byte[] b,
+public void set(byte[] b,
 int offset,
   

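A small usage note on the Bytes.BYTES_COMPARATOR constant whose javadoc appears above ("Pass this to TreeMaps where byte [] are keys"): byte[] has no value-based ordering of its own, so a TreeMap keyed by byte[] needs an explicit comparator. A minimal sketch, with made-up row keys and values:

import java.util.TreeMap;
import org.apache.hadoop.hbase.util.Bytes;

public class BytesComparatorSketch {
  public static void main(String[] args) {
    // Bytes.BYTES_COMPARATOR orders byte arrays lexicographically, byte by byte.
    TreeMap<byte[], String> byRow = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    byRow.put(Bytes.toBytes("row-001"), "first");   // hypothetical keys/values
    byRow.put(Bytes.toBytes("row-002"), "second");
    System.out.println(Bytes.toString(byRow.firstKey())); // prints row-001
  }
}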
[45/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 93d4a73..55b0055 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -281,10 +281,10 @@
  Warnings
  Errors
 
-3743
+3746
 0
 0
-15216
+15210
 
 Files
 
@@ -584,9121 +584,9116 @@
 0
 25
 
-org/apache/hadoop/hbase/MockRegionServerServices.java
-0
-0
-1
-
 org/apache/hadoop/hbase/MultithreadedTestUtil.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/NamespaceDescriptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/NotServingRegionException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/PerformanceEvaluation.java
 0
 0
 39
-
+
 org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/PrivateCellUtil.java
 0
 0
 67
-
+
 org/apache/hadoop/hbase/QosTestHelper.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/RESTApiClusterManager.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/RegionLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/RegionLocations.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/RegionStateListener.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ResourceChecker.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/ScheduledChore.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/Server.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ServerLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ServerName.java
 0
 0
 25
-
+
 org/apache/hadoop/hbase/SplitLogCounters.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SplitLogTask.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/TableDescriptors.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/TableInfoMissingException.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/TableName.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/TableNotDisabledException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TableNotEnabledException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TableNotFoundException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TagType.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestCellUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestCheckTestClasses.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/TestClassFinder.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/TestClientClusterStatus.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/TestClientOperationTimeout.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestClusterPortAssignment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/TestHBaseConfiguration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestHBaseTestingUtility.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/TestHColumnDescriptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestHTableDescriptor.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/TestIOFencing.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/TestInfoServers.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestJMXConnectorServer.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/TestKeyValue.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/TestLocalHBaseCluster.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestMetaTableAccessor.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/TestMetaTableLocator.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/TestMultiVersions.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/TestNamespace.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
 0
 0
 24
-
+
 org/apache/hadoop/hbase/TestPerformanceEvaluation.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/TestRegionRebalancing.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/TestSerialization.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/TestTimeout.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/TimestampTestBase.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/UnknownRegionException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/Waiter.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ZKNamespaceManager.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ZNodeClearer.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/BackupDriver.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/FailedArchiveException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/HFileArchiver.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase

[40/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 62ced18..18d69e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":9,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":9,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":9,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":9,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":
 
10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":9,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":9,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":10,"i174":10,"i175":10,"i176":10,"i177":9};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":9,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":9,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":9,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":9,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":
 
10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":9,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":9,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":10,"i174":10,"i175":10,"i176":10,"i177":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HMaster
+public class HMaster
 extends HRegionServer
 implements MasterServices
 HMaster is the "master server" for HBase. An HBase cluster 
has one active
@@ -651,13 +651,17 @@ implements 
+protected AssignmentManager
+createAssignmentManager(MasterServices master) 
+
+
 protected MasterMetaBootstrap
 createMetaBootstrap()
 
  Create a MasterMetaBootstrap 
instance.
 
 
-
+
 (package private) long
 createNamespace(NamespaceDescriptor namespaceDescriptor,
long nonceGroup,
@@ -665,32 +669,32 @@ implements Create a new Namespace.
 
 
-
+
 private void
 createProcedureExecutor() 
 
-
+
 private SpaceQuotaSnapshotNotifier
 createQuotaSnapshotNotifier() 
 
-
+
 protected RSRpcServices
 createRpcServices() 
 
-
+
 protected ServerManager
 createServerManager(MasterServices master)
 
 

[31/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
index 3d721e1..d4d6c23 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static enum Bytes.LexicographicalComparerHolder.PureJavaComparer
+static enum Bytes.LexicographicalComparerHolder.PureJavaComparer
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum
 implements Bytes.Comparer
 
@@ -217,7 +217,7 @@ the order they are declared.
 
 
 INSTANCE
-public static final Bytes.LexicographicalComparerHolder.PureJavaComparer
 INSTANCE
+public static final Bytes.LexicographicalComparerHolder.PureJavaComparer
 INSTANCE
 
 
 
@@ -234,7 +234,7 @@ the order they are declared.
 
 
 values
-public static Bytes.LexicographicalComparerHolder.PureJavaComparer[] values()
+public static Bytes.LexicographicalComparerHolder.PureJavaComparer[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -254,7 +254,7 @@ for (Bytes.LexicographicalComparerHolder.PureJavaComparer c 
: Bytes.Lexicographi
 
 
 valueOf
-public static Bytes.LexicographicalComparerHolder.PureJavaComparer valueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static Bytes.LexicographicalComparerHolder.PureJavaComparer valueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
@@ -276,7 +276,7 @@ not permitted.)
 
 
 compareTo
-public int compareTo(byte[] buffer1,
+public int compareTo(byte[] buffer1,
  int offset1,
  int length1,
  byte[] buffer2,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
index 909bc09..5b19db9 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static enum Bytes.LexicographicalComparerHolder.UnsafeComparer
+static enum Bytes.LexicographicalComparerHolder.UnsafeComparer
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum
 implements Bytes.Comparer
 
@@ -238,7 +238,7 @@ the order they are declared.
 
 
 INSTANCE
-public static final Bytes.LexicographicalComparerHolder.UnsafeComparer
 INSTANCE
+public static final Bytes.LexicographicalComparerHolder.UnsafeComparer
 INSTANCE
 
 
 
@@ -255,7 +255,7 @@ the order they are declared.
 
 
 theUnsafe
-static final sun.misc.Unsafe theUnsafe
+static final sun.misc.Unsafe theUnsafe
 
 
 
@@ -272,7 +272,7 @@ the order they are declared.
 
 
 values
-public static Bytes.LexicographicalComparerHolder.UnsafeComparer[] values()
+public static Bytes.LexicographicalComparerHolder.UnsafeComparer[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -292,7 +292,7 @@ for (Bytes.LexicographicalComparerHolder.UnsafeComparer c : 
Bytes.Lexicographica
 
 
 valueOf
-public static Bytes.LexicographicalComparerHolder.UnsafeComparer valueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static Bytes.LexicographicalComparerHolder.UnsafeComparer valueOf(https://docs.oracle.com/javase/8/do

[43/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 5c2b197..f372fa9 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -163,6 +163,8 @@
  
 abort
 - Variable in class org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler
  
+abort
 - Variable in class org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler
+ 
 abort(String,
 Throwable) - Method in class 
org.apache.hadoop.hbase.regionserver.HRegionServer
 
 Cause the server to exit without closing the regions it is 
serving, the log
@@ -3882,6 +3884,12 @@
  
 AssignRegionAction(int,
 int) - Constructor for class 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.AssignRegionAction
  
+AssignRegionHandler - Class in org.apache.hadoop.hbase.regionserver.handler
+
+Handles opening of a region on a region server.
+
+AssignRegionHandler(RegionServerServices,
 RegionInfo, TableDescriptor, long, EventType) - Constructor for 
class org.apache.hadoop.hbase.regionserver.handler.AssignRegionHandler
+ 
 assignRegions(MasterProcedureEnv,
 List) - Method in class 
org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure
 
 Assign the regions on the crashed RS to other Rses.
@@ -11734,6 +11742,8 @@
  
 CLEANUP_INTERVAL
 - Static variable in class org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler
  
+cleanUpAndReportFailure(IOException)
 - Method in class org.apache.hadoop.hbase.regionserver.handler.AssignRegionHandler
+ 
 cleanupAndRestoreBackupSystem(Connection,
 BackupInfo, Configuration) - Static method in class 
org.apache.hadoop.hbase.backup.impl.TableBackupClient
  
 cleanupAnySplitDetritus()
 - Method in class org.apache.hadoop.hbase.regionserver.HRegionFileSystem
@@ -11788,7 +11798,9 @@
  snapshots.
 
 cleanupFailedOpen(HRegion)
 - Method in class org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler
- 
+
+Deprecated.
+ 
 cleanupHbckZnode()
 - Method in class org.apache.hadoop.hbase.util.HBaseFsck
  
 cleanUpHFileRefs(String,
 List) - Method in class 
org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceManager
@@ -18975,6 +18987,10 @@
 
 Create the FlushPolicy configured for the given table.
 
+create(RegionServerServices,
 RegionInfo, TableDescriptor, long) - Static method in class 
org.apache.hadoop.hbase.regionserver.handler.AssignRegionHandler
+ 
+create(RegionServerServices,
 String, boolean, ServerName) - Static method in class 
org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler
+ 
 create(Configuration,
 FlushRequester, Server, RegionServerAccounting) - Static method in 
class org.apache.hadoop.hbase.regionserver.HeapMemoryManager
  
 create(HRegionServer)
 - Static method in class org.apache.hadoop.hbase.regionserver.HRegionServer.MovedRegionsCleaner
@@ -19116,6 +19132,8 @@
 
 Creates another similar Bloom filter.
 
+createAssignmentManager(MasterServices)
 - Method in class org.apache.hadoop.hbase.master.HMaster
+ 
 createAssignProcedure(RegionStateNode,
 ServerName) - Method in class 
org.apache.hadoop.hbase.master.assignment.AssignmentManager
  
 createAssignProcedures(List)
 - Method in class org.apache.hadoop.hbase.master.assignment.AssignmentManager
@@ -24226,6 +24244,10 @@
  
 delayedSeekKV
 - Variable in class org.apache.hadoop.hbase.regionserver.StoreFileScanner
  
+delayedSubmit(EventHandler,
 long, TimeUnit) - Method in class 
org.apache.hadoop.hbase.executor.ExecutorService
+ 
+delayedSubmitTimer
 - Variable in class org.apache.hadoop.hbase.executor.ExecutorService
+ 
 DelayedTask(FutureTask,
 long, TimeUnit) - Constructor for class 
org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.DelayedTask
  
 DelayedUtil - Class in org.apache.hadoop.hbase.procedure2.util
@@ -25641,6 +25663,8 @@
  
 destination
 - Variable in class org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler
  
+destination
 - Variable in class org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler
+ 
 destinationServer
 - Variable in class org.apache.hadoop.hbase.master.assignment.UnassignProcedure
 
 Deprecated.
@@ -26270,7 +26294,9 @@
 doCleanupBulkLoad(ClientProtos.CleanupBulkLoadRequest)
 - Method in class org.apache.hadoop.hbase.client.CancellableRegionServerCallable
  
 doCleanUpOnFailedOpen(HRegion)
 - Method in class org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler
- 
+
+Deprecated.
+ 
 doClient(FileSystem,
 Set, boolean, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.CompactionTool
 
 Execute compaction, from this client, one path at the 
time.
@@ -27774,6 +27800,8 @@
 
 Deprecated.
  
+encodedName
 - Variable in class org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler
+ 
 encodedName2HighestSequenceId
 - Variable in class org.apache.hadoop.hbase.regionserve

[41/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/executor/ExecutorService.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/ExecutorService.html b/devapidocs/org/apache/hadoop/hbase/executor/ExecutorService.html
index f7fc9d6..7530bbf 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/ExecutorService.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/ExecutorService.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class ExecutorService
+public class ExecutorService
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 This is a generic executor service. This component 
abstracts a
  threadpool, a queue to which EventTypes 
can be submitted,
@@ -182,14 +182,18 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Field and Description
 
 
+private 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningScheduledExecutorService
+delayedSubmitTimer 
+
+
 private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentHashMapString,ExecutorService.Executor>
 executorMap 
 
-
+
 private static org.slf4j.Logger
 LOG 
 
-
+
 private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 servername 
 
@@ -228,42 +232,48 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Method and Description
 
 
+void
+delayedSubmit(EventHandler eh,
+ long delay,
+ https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true";
 title="class or interface in 
java.util.concurrent">TimeUnit unit) 
+
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,ExecutorService.ExecutorStatus>
 getAllExecutorStatuses() 
 
-
+
 (package private) ExecutorService.Executor
 getExecutor(ExecutorType type) 
 
-
+
 (package private) ExecutorService.Executor
 getExecutor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ThreadPoolExecutor
 getExecutorThreadPool(ExecutorType type) 
 
-
+
 (package private) boolean
 isExecutorServiceRunning(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
-
+
 void
 shutdown() 
 
-
+
 void
 startExecutorService(ExecutorType type,
 int maxThreads) 
 
-
+
 void
 startExecutorService(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name,
 int maxThreads)
 Start an executor service with a given name.
 
 
-
+
 void
 submit(EventHandler eh) 
 
@@ -295,7 +305,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -304,16 +314,25 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 executorMap
-private final https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentHashMap<String,ExecutorService.Executor> executorMap
+private final https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentHashMap<String,ExecutorService.Executor> executorMap
 
 
 
 
 
-
+
 
 servername
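
A minimal sketch of how the new delayedSubmit(EventHandler, long, TimeUnit) listed in the method
summary above might be used, assuming an already-started ExecutorService; the class and method
names below other than delayedSubmit are illustrative only, not part of this commit:

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.executor.EventHandler;
import org.apache.hadoop.hbase.executor.ExecutorService;

public class DelayedSubmitSketch {
  // Hand the handler back to the service one second from now instead of
  // blocking a worker thread; the delayedSubmitTimer field listed above
  // presumably backs this delay before the normal submit path runs the handler.
  static void retryLater(ExecutorService executorService, EventHandler handler) {
    executorService.delayedSubmit(handler, 1, TimeUnit.SECONDS);
  }
}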

[44/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 0d4cfec..9541318 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2018 The Apache Software Foundation
 
-  File: 3743,
- Errors: 15216,
+  File: 3746,
+ Errors: 15210,
  Warnings: 0,
  Infos: 0
   
@@ -4890,6 +4890,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.TestCloseAnOpeningRegion.java";>org/apache/hadoop/hbase/master/TestCloseAnOpeningRegion.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.TestMasterStatusServlet.java";>org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
 
 
@@ -6584,6 +6598,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.handler.AssignRegionHandler.java";>org/apache/hadoop/hbase/regionserver/handler/AssignRegionHandler.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours.java";>org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java
 
 
@@ -14839,7 +14867,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -22824,6 +22852,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler.java";>org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.AtomicUtils.java";>org/apache/hadoop/hbase/util/AtomicUtils.java
 
 
@@ -28293,7 +28335,7 @@ under the License.
   0
 
 
-  5
+  4
 
   
   
@@ -34901,7 +34943,7 @@ under the License.
   0
 
 
-  11
+  10
 
   
   
@@ -52261,7 +52303,7 @@ under the License.
   0
 
 
-  6
+  3
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/coc.html
--
diff --git a/coc.html b/coc.html
index b3a3990..a9a898a 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -375,7 +375,7 @@ email to mailto:priv...@hbase.apache.org";>the priv
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-22
+  Last Published: 
2018-09-25
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 5419660..5847017 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -440,7 +440,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-22
+  Last Published: 
2018-09-25
 
 
 

http

[21/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.TableDescriptorGetter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.TableDescriptorGetter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.TableDescriptorGetter.html
index 0cf012a..976894f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.TableDescriptorGetter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.TableDescriptorGetter.html
@@ -63,3884 +63,3883 @@
 055import javax.servlet.http.HttpServlet;
 056import 
javax.servlet.http.HttpServletRequest;
 057import 
javax.servlet.http.HttpServletResponse;
-058
-059import 
org.apache.commons.lang3.StringUtils;
-060import 
org.apache.hadoop.conf.Configuration;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.ChoreService;
-063import 
org.apache.hadoop.hbase.ClusterId;
-064import 
org.apache.hadoop.hbase.ClusterMetrics;
-065import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-066import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-067import 
org.apache.hadoop.hbase.CompoundConfiguration;
-068import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-069import 
org.apache.hadoop.hbase.HBaseIOException;
-070import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-071import 
org.apache.hadoop.hbase.HConstants;
-072import 
org.apache.hadoop.hbase.InvalidFamilyOperationException;
-073import 
org.apache.hadoop.hbase.MasterNotRunningException;
-074import 
org.apache.hadoop.hbase.MetaTableAccessor;
-075import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-076import 
org.apache.hadoop.hbase.PleaseHoldException;
-077import 
org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-078import 
org.apache.hadoop.hbase.ServerName;
-079import 
org.apache.hadoop.hbase.TableDescriptors;
-080import 
org.apache.hadoop.hbase.TableName;
-081import 
org.apache.hadoop.hbase.TableNotDisabledException;
-082import 
org.apache.hadoop.hbase.TableNotFoundException;
-083import 
org.apache.hadoop.hbase.UnknownRegionException;
-084import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-085import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-086import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-087import 
org.apache.hadoop.hbase.client.RegionInfo;
-088import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-089import 
org.apache.hadoop.hbase.client.Result;
-090import 
org.apache.hadoop.hbase.client.TableDescriptor;
-091import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-092import 
org.apache.hadoop.hbase.client.TableState;
-093import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-094import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-095import 
org.apache.hadoop.hbase.exceptions.MergeRegionException;
-096import 
org.apache.hadoop.hbase.executor.ExecutorType;
-097import 
org.apache.hadoop.hbase.favored.FavoredNodesManager;
-098import 
org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-099import 
org.apache.hadoop.hbase.http.InfoServer;
-100import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-101import 
org.apache.hadoop.hbase.ipc.RpcServer;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-104import 
org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
-105import 
org.apache.hadoop.hbase.master.assignment.AssignProcedure;
-106import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-107import 
org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
-108import 
org.apache.hadoop.hbase.master.assignment.MoveRegionProcedure;
-109import 
org.apache.hadoop.hbase.master.assignment.RegionStateNode;
-110import 
org.apache.hadoop.hbase.master.assignment.RegionStates;
-111import 
org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
-112import 
org.apache.hadoop.hbase.master.assignment.UnassignProcedure;
-113import 
org.apache.hadoop.hbase.master.balancer.BalancerChore;
-114import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
-115import 
org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
-116import 
org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
-117import 
org.apache.hadoop.hbase.master.cleaner.CleanerChore;
-118import 
org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-119import 
org.apache.hadoop.hbase.master.cleaner.LogCleaner;
-120import 
org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
-121import 
org.apache.hadoop.hbase.master.locking.LockManager;
-122import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
-123import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-124import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
-125import 
org.apache.hadoop.hbase.master.normaliz

[39/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 6fc579b..b04b0d5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -247,14 +247,14 @@ extends 
 
 Methods inherited from class org.apache.hadoop.hbase.master.HMaster
-abort,
 abortProcedure,
 addColumn,
 addReplicationPeer,
 balance,
 balance,
 balanceSwitch,
 canCreateBaseZNode, canUpdateTableDescriptor,
 checkIfShouldMoveSystemRegionAsync,
 checkInitialized,
 checkServiceStarted,
 checkTableModifiable,
 configureInfoServer,
 constructMaster,
 createMetaBootstrap,
 createNamespace,
 createRpcServices,
 createServerManager,
 createSystemTable,
 createTable,
 decommissionRegionSer
 vers, decorateMasterConfiguration,
 deleteColumn,
 deleteNamespace,
 deleteTable,
 disableReplicationPeer,
 disableTable,
 enableReplicationPeer, enableTable,
 getAssignmentManager,
 getAverageLoad,
 getCatalogJanitor,
 getClientIdAuditPrefix,
 getClusterMetrics,
 getClusterMetrics,
 getClusterMetricsWi
 thoutCoprocessor, getClusterMetricsWithoutCoprocessor,
 getClusterSchema,
 getDumpServlet,
 getFavoredNodesManager,
 getHFileCleaner,
 getInitializedEvent,
 getLastMajorCompactionTimestamp,
 getLastMajorCompactionTimestampForRegion,
 getLoadBalancer,
 getLoadBalancerClassName,
 getLoadedCoprocessors,
 getLockManager,
 getLocks,
 getLogCleaner,
 getMasterActiveTime,
 getMasterCoprocessorHost,
 getMasterCoprocessors,
 getMasterFileSystem,
 getMasterFinishedInitializationTime,
 getMasterMetrics,
 getMasterProcedureExecutor,
 getMasterProcedureManagerHost,
 getMasterQuotaManager,
 getMasterRpcServices,
 getMasterStartTime,
 getMasterWalManager,
 getMergePlanCount,
 getMetaTableObserver,
 getMobCompactionState,
 getNamespace,
 getNamespaces,
 getNumWALFiles,
 getProcedures, getProcessName,
 getQuotaObserverChore,
 getRegionNormalizer,
 getRegionNormalizerTracker,
 getRegionServerFatalLogBuffer,
 getRegionServerInfoPort,
 getRegionServerVersion,
 getRemoteInetAddress,
 getReplicationLoad,
 getReplicationPeerConfig,
 getReplicationPeerManager,
 getServerManager,
 getServerName,
 getSnapshotManager,
 getSnapshotQuotaObserverChore,
 getSpaceQuotaSnapshotNotifier,
 getSplitOrMergeTracker,
 getSplitPlanCount,
 getSyncReplicationReplayWALManager,
 getTableDescriptors,
 getTableRegionForRow,
 getTableStateManager,
 getUseThisHostnameInstead, getWalProcedureStore,
 getZooKeeper,
 initClusterSchemaService,
 initializeZKBasedSystemTrackers,
 isActiveMaster,
 isBalancerOn,
 isCatalogJanitorEnabled,
 isCleanerChoreEnabled,
 isInitialized,
 isInMaintenanceMode,
 isNormalizerOn,
 isSplitOrMergeEnabled,
 listDecommissionedRegionServers,
 listReplicationPeers,
 listTableDescriptors,
 listTableDescriptorsByNamespace,
 listTableNames,
 listTableNamesByNamespace,
 login,
 main,
 mergeRegions,
 modifyColumn<
 /a>, modifyNamespace,
 modifyTable,
 move,
 normalizeRegions,
 recommissionRegionServer,
 registerService,
 remoteProcedureComplet
 ed, remoteProcedureFailed,
 removeReplicationPeer,
 reportMobCompactionEnd,
 reportMobCompactionStart,
 requestMobCompaction,
 restoreSnapshot, setCatalogJanitorEnabled,
 setInitialized,
 shutdown,
 splitRegion,
 stop,
 stopMaster,
 stopServiceThreads,
 transitReplicationPeerSyncReplicationState, 
truncateTable,
 updateConfigurationForQuotasObserver,
 updateReplicationPeerConfig,
 waitForMasterActive,
 waitUntilMetaOnline,
 waitUntilNamespaceOnline
+abort,
 abortProcedure,
 addColumn,
 addReplicationPeer,
 balance,
 balance,
 balanceSwitch,
 canCreateBaseZNode, canUpdateTableDescriptor,
 checkIfShouldMoveSystemRegionAsync,
 checkInitialized,
 checkServiceStarted,
 checkTableModifiable,
 configureInfoServer,
 constructMaster,
 createAssignmentManager,
 createMetaBootstrap,
 createNamespace,
 createRpcServices,
 createServerManager,
 createSystemTable,
 createTable, decommissionRegionServers,
 decorateMasterConfiguration,
 deleteColumn,
 deleteNamespace,
 deleteTable,
 disableReplicationPeer,
 disableTable, enableReplicationPeer,
 enableTable,
 getAssignmentManager,
 getAverageLoad,
 getCatalogJanitor,
 getClientIdAuditPrefix,
 getClusterMetrics,
 getClusterMetrics, getClusterMetricsWithoutCoprocessor,
 getClusterMetricsWithoutCoprocessor,
 getClusterSchema,
 getDumpServlet,
 getFavoredNodesManager,
 getHFileCleaner,
 getInitializedEvent,
 getLastMajorCompactionTimestamp,
 getLastMajorCompactionTimestampForRegion,
 getLoadBalancer,
 getLoadBalancerClassName,
 getLoadedCoprocessors,
 getLockManager,
 getLocks,
 getLogCleaner,
 getMasterActiveTime, getMasterCoprocessorHost,
 getMasterCoprocessors,
 getMaste

[01/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e4b87e9ee -> f6f9d4f3e


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
index 804ef45..e999ddb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
@@ -138,2491 +138,2492 @@
 130  // SizeOf which uses 
java.lang.instrument says 24 bytes. (3 longs?)
 131  public static final int 
ESTIMATED_HEAP_TAX = 16;
 132
-133  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-134
-135  /**
-136   * Returns length of the byte array, 
returning 0 if the array is null.
-137   * Useful for calculating sizes.
-138   * @param b byte array, which can be 
null
-139   * @return 0 if b is null, otherwise 
returns length
-140   */
-141  final public static int len(byte[] b) 
{
-142return b == null ? 0 : b.length;
-143  }
-144
-145  private byte[] bytes;
-146  private int offset;
-147  private int length;
-148
-149  /**
-150   * Create a zero-size sequence.
-151   */
-152  public Bytes() {
-153super();
-154  }
-155
-156  /**
-157   * Create a Bytes using the byte array 
as the initial value.
-158   * @param bytes This array becomes the 
backing storage for the object.
-159   */
-160  public Bytes(byte[] bytes) {
-161this(bytes, 0, bytes.length);
-162  }
-163
-164  /**
-165   * Set the new Bytes to the contents of 
the passed
-166   * ibw.
-167   * @param ibw the value to set this 
Bytes to.
-168   */
-169  public Bytes(final Bytes ibw) {
-170this(ibw.get(), ibw.getOffset(), 
ibw.getLength());
-171  }
-172
-173  /**
-174   * Set the value to a given byte 
range
-175   * @param bytes the new byte range to 
set to
-176   * @param offset the offset in newData 
to start at
-177   * @param length the number of bytes in 
the range
-178   */
-179  public Bytes(final byte[] bytes, final 
int offset,
-180  final int length) {
-181this.bytes = bytes;
-182this.offset = offset;
-183this.length = length;
-184  }
-185
-186  /**
-187   * Copy bytes from ByteString 
instance.
-188   * @param byteString copy from
-189   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-190   */
-191  @Deprecated
-192  public Bytes(final ByteString 
byteString) {
-193this(byteString.toByteArray());
-194  }
-195
-196  /**
-197   * Get the data from the Bytes.
-198   * @return The data is only valid 
between offset and offset+length.
-199   */
-200  public byte [] get() {
-201if (this.bytes == null) {
-202  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-203  "called w/o accompaying 
readFields invocation");
-204}
-205return this.bytes;
-206  }
-207
-208  /**
-209   * @param b Use passed bytes as backing 
array for this instance.
-210   */
-211  public void set(final byte [] b) {
-212set(b, 0, b.length);
-213  }
-214
-215  /**
-216   * @param b Use passed bytes as backing 
array for this instance.
-217   * @param offset
-218   * @param length
-219   */
-220  public void set(final byte [] b, final 
int offset, final int length) {
-221this.bytes = b;
-222this.offset = offset;
-223this.length = length;
-224  }
-225
-226  /**
-227   * @return the number of valid bytes in 
the buffer
-228   * @deprecated use {@link #getLength()} 
instead
-229   */
-230  @Deprecated
-231  public int getSize() {
-232if (this.bytes == null) {
-233  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-234  "called w/o accompaying 
readFields invocation");
-235}
-236return this.length;
-237  }
-238
-239  /**
-240   * @return the number of valid bytes in 
the buffer
-241   */
-242  public int getLength() {
-243if (this.bytes == null) {
-244  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-245  "called w/o accompaying 
readFields invocation");
-246}
-247return this.length;
-248  }
-249
-250  /**
-251   * @return offset
-252   */
-253  public int getOffset(){
-254return this.offset;
-255  }
-256
-257  /**
-258   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-259   */
-260  @Deprecated
-261  public ByteString toByteString() {
-262return 
ByteString.copyFrom(this.bytes, this.offset, this.length);
-263  }
-264
-265  @Override
-266  public int hashCode() {
-267return Bytes.hashCode(bytes, offset, 
length);
-268  }
-269
-270  /**
-271   * Define the sort order of the 
Bytes.
-272   * @param that The other bytes 
writable
-273   * @return Positive if left is bigger 
than right, 0 if they are equal, and
-274   * negative if left is s

[10/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
index 25f458d..20e3eaa 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
@@ -28,3711 +28,3756 @@
 020import java.io.FileNotFoundException;
 021import java.io.IOException;
 022import java.io.InterruptedIOException;
-023import 
java.lang.reflect.InvocationTargetException;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Collections;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Map.Entry;
-036import java.util.NavigableMap;
-037import java.util.Set;
-038import java.util.TreeSet;
-039import 
java.util.concurrent.ConcurrentHashMap;
-040import 
java.util.concurrent.ConcurrentMap;
-041import java.util.concurrent.TimeUnit;
-042import 
java.util.concurrent.atomic.AtomicBoolean;
-043import 
java.util.concurrent.atomic.AtomicLong;
-044import 
java.util.concurrent.atomic.LongAdder;
-045import 
org.apache.commons.lang3.mutable.MutableObject;
-046import 
org.apache.hadoop.conf.Configuration;
-047import org.apache.hadoop.fs.Path;
-048import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-049import 
org.apache.hadoop.hbase.CacheEvictionStats;
-050import 
org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.Cell;
-052import 
org.apache.hadoop.hbase.CellScannable;
-053import 
org.apache.hadoop.hbase.CellScanner;
-054import 
org.apache.hadoop.hbase.CellUtil;
-055import 
org.apache.hadoop.hbase.CompareOperator;
-056import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-057import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-058import 
org.apache.hadoop.hbase.HBaseIOException;
-059import 
org.apache.hadoop.hbase.HConstants;
-060import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-061import 
org.apache.hadoop.hbase.NotServingRegionException;
-062import 
org.apache.hadoop.hbase.PrivateCellUtil;
-063import 
org.apache.hadoop.hbase.RegionTooBusyException;
-064import org.apache.hadoop.hbase.Server;
-065import 
org.apache.hadoop.hbase.ServerName;
-066import 
org.apache.hadoop.hbase.TableName;
-067import 
org.apache.hadoop.hbase.UnknownScannerException;
-068import 
org.apache.hadoop.hbase.client.Append;
-069import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-070import 
org.apache.hadoop.hbase.client.Delete;
-071import 
org.apache.hadoop.hbase.client.Durability;
-072import 
org.apache.hadoop.hbase.client.Get;
-073import 
org.apache.hadoop.hbase.client.Increment;
-074import 
org.apache.hadoop.hbase.client.Mutation;
-075import 
org.apache.hadoop.hbase.client.Put;
-076import 
org.apache.hadoop.hbase.client.RegionInfo;
-077import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-078import 
org.apache.hadoop.hbase.client.Result;
-079import 
org.apache.hadoop.hbase.client.Row;
-080import 
org.apache.hadoop.hbase.client.RowMutations;
-081import 
org.apache.hadoop.hbase.client.Scan;
-082import 
org.apache.hadoop.hbase.client.TableDescriptor;
-083import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-084import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-085import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-086import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-087import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-088import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-089import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-090import 
org.apache.hadoop.hbase.io.TimeRange;
-091import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-092import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-093import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-094import 
org.apache.hadoop.hbase.ipc.QosPriority;
-095import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-096import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-097import 
org.apache.hadoop.hbase.ipc.RpcScheduler;
-098import 
org.apache.hadoop.hbase.ipc.RpcServer;
-099import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-100import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-101import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.ha

[13/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
index 25f458d..20e3eaa 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
@@ -28,3711 +28,3756 @@
 020import java.io.FileNotFoundException;
 021import java.io.IOException;
 022import java.io.InterruptedIOException;
-023import 
java.lang.reflect.InvocationTargetException;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Collections;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Map.Entry;
-036import java.util.NavigableMap;
-037import java.util.Set;
-038import java.util.TreeSet;
-039import 
java.util.concurrent.ConcurrentHashMap;
-040import 
java.util.concurrent.ConcurrentMap;
-041import java.util.concurrent.TimeUnit;
-042import 
java.util.concurrent.atomic.AtomicBoolean;
-043import 
java.util.concurrent.atomic.AtomicLong;
-044import 
java.util.concurrent.atomic.LongAdder;
-045import 
org.apache.commons.lang3.mutable.MutableObject;
-046import 
org.apache.hadoop.conf.Configuration;
-047import org.apache.hadoop.fs.Path;
-048import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-049import 
org.apache.hadoop.hbase.CacheEvictionStats;
-050import 
org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.Cell;
-052import 
org.apache.hadoop.hbase.CellScannable;
-053import 
org.apache.hadoop.hbase.CellScanner;
-054import 
org.apache.hadoop.hbase.CellUtil;
-055import 
org.apache.hadoop.hbase.CompareOperator;
-056import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-057import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-058import 
org.apache.hadoop.hbase.HBaseIOException;
-059import 
org.apache.hadoop.hbase.HConstants;
-060import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-061import 
org.apache.hadoop.hbase.NotServingRegionException;
-062import 
org.apache.hadoop.hbase.PrivateCellUtil;
-063import 
org.apache.hadoop.hbase.RegionTooBusyException;
-064import org.apache.hadoop.hbase.Server;
-065import 
org.apache.hadoop.hbase.ServerName;
-066import 
org.apache.hadoop.hbase.TableName;
-067import 
org.apache.hadoop.hbase.UnknownScannerException;
-068import 
org.apache.hadoop.hbase.client.Append;
-069import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-070import 
org.apache.hadoop.hbase.client.Delete;
-071import 
org.apache.hadoop.hbase.client.Durability;
-072import 
org.apache.hadoop.hbase.client.Get;
-073import 
org.apache.hadoop.hbase.client.Increment;
-074import 
org.apache.hadoop.hbase.client.Mutation;
-075import 
org.apache.hadoop.hbase.client.Put;
-076import 
org.apache.hadoop.hbase.client.RegionInfo;
-077import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-078import 
org.apache.hadoop.hbase.client.Result;
-079import 
org.apache.hadoop.hbase.client.Row;
-080import 
org.apache.hadoop.hbase.client.RowMutations;
-081import 
org.apache.hadoop.hbase.client.Scan;
-082import 
org.apache.hadoop.hbase.client.TableDescriptor;
-083import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-084import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-085import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-086import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-087import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-088import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-089import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-090import 
org.apache.hadoop.hbase.io.TimeRange;
-091import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-092import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-093import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-094import 
org.apache.hadoop.hbase.ipc.QosPriority;
-095import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-096import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-097import 
org.apache.hadoop.hbase.ipc.RpcScheduler;
-098import 
org.apache.hadoop.hbase.ipc.RpcServer;
-099import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-100import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-101import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.

[37/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index a5e8409..cab645e 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -1292,8 +1292,7 @@ implements 
 void
 postOpenDeployTasks(RegionServerServices.PostOpenDeployContext context)
-Tasks to perform after region open to complete deploy of 
region on
- regionserver
+Tasks to perform after region open to complete deploy of 
region on regionserver
 
 
 
@@ -2425,7 +2424,7 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 
 movedRegions
-protected https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,HRegionServer.MovedRegionInfo> movedRegions
+protected https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,HRegionServer.MovedRegionInfo> movedRegions
 
 
 
@@ -2434,7 +2433,7 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 
 TIMEOUT_REGION_MOVED
-private static final int TIMEOUT_REGION_MOVED
+private static final int TIMEOUT_REGION_MOVED
 
 See Also:
 Constant
 Field Values
@@ -3357,18 +3356,15 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 postOpenDeployTasks
 public void postOpenDeployTasks(RegionServerServices.PostOpenDeployContext context)
- throws org.apache.zookeeper.KeeperException,
-https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
+ throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: RegionServerServices
-Tasks to perform after region open to complete deploy of 
region on
- regionserver
+Tasks to perform after region open to complete deploy of 
region on regionserver
 
 Specified by:
 postOpenDeployTasks in
 interface RegionServerServices
 Parameters:
 context - the context
 Throws:
-org.apache.zookeeper.KeeperException
 https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 
@@ -3379,7 +3375,7 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 
 reportRegionStateTransition
-public boolean reportRegionStateTransition(RegionServerServices.RegionStateTransitionContext context)
+public boolean reportRegionStateTransition(RegionServerServices.RegionStateTransitionContext context)
 Description copied from 
interface: RegionServerServices
 Notify master that a handler requests to change a region 
state
 
@@ -3394,7 +3390,7 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 
 triggerFlushInPrimaryRegion
-void triggerFlushInPrimaryRegion(HRegion region)
+void triggerFlushInPrimaryRegion(HRegion region)
 Trigger a flush in the primary region replica if this 
region is a secondary replica. Does not
  block this thread. See RegionReplicaFlushHandler for details.
 
@@ -3405,7 +3401,7 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 
 getRpcServer
-public RpcServerInterface getRpcServer()
+public RpcServerInterface getRpcServer()
 Description copied from 
interface: RegionServerServices
 Returns a reference to the region server's RPC server
 
@@ -3420,7 +3416,7 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 
 getRSRpcServices
-public RSRpcServices getRSRpcServices()
+public RSRpcServices getRSRpcServices()
 
 
 
@@ -3429,7 +3425,7 @@ protected static final https://docs.oracle.com/javase/8/docs/api/j
 
 
 abort
-public void abort(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String reason,
+public void abort(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String reason,
   https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable cause)
 Cause the server to exit without closing the regions it is 
serving, the log
  it is using and without notifying the master. Used unit testing and on
@
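
Among the HRegionServer methods shown above, reportRegionStateTransition is the hook a region
handler uses to notify the master of a state change. A hedged sketch of the call, mirroring the
OPENED report made in the HRegionServer source quoted later in these messages; the wrapper class
and method names are illustrative, not HBase API:

import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;

public class TransitionReportSketch {
  // Report a successful open to the master; a false return means the master
  // could not be told and the caller should treat the open as failed.
  static boolean reportOpened(RegionServerServices services, RegionInfo regionInfo,
      long openSeqNum, long masterSystemTime) {
    return services.reportRegionStateTransition(
        new RegionStateTransitionContext(TransitionCode.OPENED, openSeqNum, masterSystemTime, regionInfo));
  }
}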

[05/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
index acc491f..e6c6561 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
@@ -26,256 +26,255 @@
 018 */
 019package 
org.apache.hadoop.hbase.regionserver;
 020
-021import java.io.IOException;
-022import java.util.Collection;
-023import java.util.List;
-024import java.util.Map.Entry;
-025import 
java.util.concurrent.ConcurrentMap;
-026
+021import com.google.protobuf.Service;
+022import java.io.IOException;
+023import java.util.Collection;
+024import java.util.List;
+025import java.util.Map.Entry;
+026import 
java.util.concurrent.ConcurrentMap;
 027import 
org.apache.hadoop.hbase.Abortable;
 028import org.apache.hadoop.hbase.Server;
-029import 
org.apache.hadoop.hbase.TableName;
-030import 
org.apache.hadoop.hbase.client.RegionInfo;
-031import 
org.apache.hadoop.hbase.client.locking.EntityLock;
-032import 
org.apache.hadoop.hbase.executor.ExecutorService;
-033import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-034import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-035import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-036import 
org.apache.hadoop.hbase.quotas.RegionSizeStore;
-037import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequester;
-038import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-039import org.apache.hadoop.hbase.wal.WAL;
-040import 
org.apache.yetus.audience.InterfaceAudience;
-041import 
org.apache.zookeeper.KeeperException;
+029import 
org.apache.hadoop.hbase.TableDescriptors;
+030import 
org.apache.hadoop.hbase.TableName;
+031import 
org.apache.hadoop.hbase.client.RegionInfo;
+032import 
org.apache.hadoop.hbase.client.locking.EntityLock;
+033import 
org.apache.hadoop.hbase.executor.ExecutorService;
+034import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
+035import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
+036import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
+037import 
org.apache.hadoop.hbase.quotas.RegionSizeStore;
+038import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequester;
+039import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
+040import org.apache.hadoop.hbase.wal.WAL;
+041import 
org.apache.yetus.audience.InterfaceAudience;
 042
 043import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
 044
-045import com.google.protobuf.Service;
-046
-047/**
-048 * A curated subset of services provided 
by {@link HRegionServer}.
-049 * For use internally only. Passed to 
Managers, Services and Chores so can pass less-than-a
-050 * full-on HRegionServer at test-time. Be 
judicious adding API. Changes cause ripples through
-051 * the code base.
-052 */
-053@InterfaceAudience.Private
-054public interface RegionServerServices 
extends Server, MutableOnlineRegions, FavoredNodesForRegion {
-055
-056  /** @return the WAL for a particular 
region. Pass null for getting the
-057   * default (common) WAL */
-058  WAL getWAL(RegionInfo regionInfo) 
throws IOException;
-059
-060  /** @return the List of WALs that are 
used by this server
-061   *  Doesn't include the meta WAL
-062   */
-063  List getWALs() throws 
IOException;
-064
-065  /**
-066   * @return Implementation of {@link 
FlushRequester} or null. Usually it will not be null unless
-067   * during intialization.
-068   */
-069  FlushRequester getFlushRequester();
-070
-071  /**
-072   * @return Implementation of {@link 
CompactionRequester} or null. Usually it will not be null
-073   * unless during 
intialization.
-074   */
-075  CompactionRequester 
getCompactionRequestor();
-076
-077  /**
-078   * @return the RegionServerAccounting 
for this Region Server
-079   */
-080  RegionServerAccounting 
getRegionServerAccounting();
-081
-082  /**
-083   * @return RegionServer's instance of 
{@link RegionServerRpcQuotaManager}
-084   */
-085  RegionServerRpcQuotaManager 
getRegionServerRpcQuotaManager();
-086
-087  /**
-088   * @return RegionServer's instance of 
{@link SecureBulkLoadManager}
-089   */
-090  SecureBulkLoadManager 
getSecureBulkLoadManager();
-091
-092  /**
-093   * @return RegionServer's instance of 
{@link RegionServerSpaceQuotaManager}
-094   */
-095  RegionServerSpaceQuotaManager 
getRegionServerSpaceQuotaManager();
-096
-097  /**
-098   * Context for postOpenDeployTasks().
-099   */
-100  class PostOpenDeployContext {
-101private final HRegion region
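
The interface javadoc quoted above explains the design: components are written against this
curated RegionServerServices view so tests can hand in a stub instead of a full HRegionServer.
A hedged sketch of that pattern with a hypothetical component (FlushCheckSketch is illustrative,
not part of HBase), using only members shown in the quoted interface:

import java.io.IOException;
import org.apache.hadoop.hbase.regionserver.FlushRequester;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;

public class FlushCheckSketch {
  private final RegionServerServices services;

  FlushCheckSketch(RegionServerServices services) {
    this.services = services;
  }

  // Depends only on the narrow slice of server behaviour it needs, so a unit
  // test can supply a mock RegionServerServices rather than a real server.
  boolean canRequestFlush() throws IOException {
    FlushRequester flushRequester = services.getFlushRequester();
    return flushRequester != null && !services.getWALs().isEmpty();
  }
}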

[46/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/book.html
--
diff --git a/book.html b/book.html
index 21103ca..2eb63b5 100644
--- a/book.html
+++ b/book.html
@@ -41284,7 +41284,7 @@ 
org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2018-09-22 14:32:13 UTC
+Last updated 2018-09-25 14:36:10 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index e22314e..79a2f7e 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -306,7 +306,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-09-22
+  Last Published: 
2018-09-25
 
 
 



[20/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
index 0cf012a..976894f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
@@ -63,3884 +63,3883 @@
 055import javax.servlet.http.HttpServlet;
 056import 
javax.servlet.http.HttpServletRequest;
 057import 
javax.servlet.http.HttpServletResponse;
-058
-059import 
org.apache.commons.lang3.StringUtils;
-060import 
org.apache.hadoop.conf.Configuration;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.ChoreService;
-063import 
org.apache.hadoop.hbase.ClusterId;
-064import 
org.apache.hadoop.hbase.ClusterMetrics;
-065import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-066import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-067import 
org.apache.hadoop.hbase.CompoundConfiguration;
-068import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-069import 
org.apache.hadoop.hbase.HBaseIOException;
-070import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-071import 
org.apache.hadoop.hbase.HConstants;
-072import 
org.apache.hadoop.hbase.InvalidFamilyOperationException;
-073import 
org.apache.hadoop.hbase.MasterNotRunningException;
-074import 
org.apache.hadoop.hbase.MetaTableAccessor;
-075import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-076import 
org.apache.hadoop.hbase.PleaseHoldException;
-077import 
org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-078import 
org.apache.hadoop.hbase.ServerName;
-079import 
org.apache.hadoop.hbase.TableDescriptors;
-080import 
org.apache.hadoop.hbase.TableName;
-081import 
org.apache.hadoop.hbase.TableNotDisabledException;
-082import 
org.apache.hadoop.hbase.TableNotFoundException;
-083import 
org.apache.hadoop.hbase.UnknownRegionException;
-084import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-085import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-086import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-087import 
org.apache.hadoop.hbase.client.RegionInfo;
-088import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-089import 
org.apache.hadoop.hbase.client.Result;
-090import 
org.apache.hadoop.hbase.client.TableDescriptor;
-091import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-092import 
org.apache.hadoop.hbase.client.TableState;
-093import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-094import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-095import 
org.apache.hadoop.hbase.exceptions.MergeRegionException;
-096import 
org.apache.hadoop.hbase.executor.ExecutorType;
-097import 
org.apache.hadoop.hbase.favored.FavoredNodesManager;
-098import 
org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-099import 
org.apache.hadoop.hbase.http.InfoServer;
-100import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-101import 
org.apache.hadoop.hbase.ipc.RpcServer;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-104import 
org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
-105import 
org.apache.hadoop.hbase.master.assignment.AssignProcedure;
-106import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-107import 
org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
-108import 
org.apache.hadoop.hbase.master.assignment.MoveRegionProcedure;
-109import 
org.apache.hadoop.hbase.master.assignment.RegionStateNode;
-110import 
org.apache.hadoop.hbase.master.assignment.RegionStates;
-111import 
org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
-112import 
org.apache.hadoop.hbase.master.assignment.UnassignProcedure;
-113import 
org.apache.hadoop.hbase.master.balancer.BalancerChore;
-114import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
-115import 
org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
-116import 
org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
-117import 
org.apache.hadoop.hbase.master.cleaner.CleanerChore;
-118import 
org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-119import 
org.apache.hadoop.hbase.master.cleaner.LogCleaner;
-120import 
org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
-121import 
org.apache.hadoop.hbase.master.locking.LockManager;
-122import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
-123import 
org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-124import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
-125import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizerChore;
-126import 
org.apache.hadoop.hbase.master.normalizer.RegionNormalizerFactory;
-127impor

hbase-site git commit: INFRA-10751 Empty commit

2018-09-25 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site f6f9d4f3e -> 2a4120d7e


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/2a4120d7
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/2a4120d7
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/2a4120d7

Branch: refs/heads/asf-site
Commit: 2a4120d7e95e5c741d32fa49465fa6eaac0695c6
Parents: f6f9d4f
Author: jenkins 
Authored: Tue Sep 25 14:54:50 2018 +
Committer: jenkins 
Committed: Tue Sep 25 14:54:50 2018 +

--

--




[34/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
index ec79484..bfe700b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class RegionServerServices.PostOpenDeployContext
+public static class RegionServerServices.PostOpenDeployContext
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 Context for postOpenDeployTasks().
 
@@ -211,7 +211,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 region
-private final HRegion region
+private final HRegion region
 
 
 
@@ -220,7 +220,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 masterSystemTime
-private final long masterSystemTime
+private final long masterSystemTime
 
 
 
@@ -238,7 +238,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 PostOpenDeployContext
 @InterfaceAudience.Private
-public PostOpenDeployContext(HRegion region,
+public PostOpenDeployContext(HRegion region,
 
long masterSystemTime)
 
 
@@ -256,7 +256,7 @@ public 
 
 getRegion
-public HRegion getRegion()
+public HRegion getRegion()
 
 
 
@@ -265,7 +265,7 @@ public 
 
 getMasterSystemTime
-public long getMasterSystemTime()
+public long getMasterSystemTime()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
index bebc751..c642a6b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class RegionServerServices.RegionStateTransitionContext
+public static class RegionServerServices.RegionStateTransitionContext
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 
 
@@ -228,7 +228,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 code
-private 
final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode
 code
+private 
final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode
 code
 
 
 
@@ -237,7 +237,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 openSeqNum
-private final long openSeqNum
+private final long openSeqNum
 
 
 
@@ -246,7 +246,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 masterSystemTime
-private final long masterSystemTime
+private final long masterSystemTime
 
 
 
@@ -255,7 +255,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 hris
-private final RegionInfo[] hris
+private final RegionInfo[] hris
 
 
 
@@ -273,7 +273,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 RegionStateTransitionContext
 @InterfaceAudience.Private
-public RegionStateTransitionContext(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode code,
+public RegionStateTransitionContext(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode code,

long openSeqNum,

long masterSystemTime,
RegionInfo... hris)
@@ -293,7 +293,7 @@ public 
 
 getCode
-public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getCode()
+public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getCode()
 
 
 
@@ -302,7 +302,7 @@ public 
 

[02/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
index 804ef45..e999ddb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
@@ -138,2491 +138,2492 @@
 130  // SizeOf which uses 
java.lang.instrument says 24 bytes. (3 longs?)
 131  public static final int 
ESTIMATED_HEAP_TAX = 16;
 132
-133  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-134
-135  /**
-136   * Returns length of the byte array, 
returning 0 if the array is null.
-137   * Useful for calculating sizes.
-138   * @param b byte array, which can be null
-139   * @return 0 if b is null, otherwise returns length
-140   */
-141  final public static int len(byte[] b) {
-142    return b == null ? 0 : b.length;
-143  }
-144
-145  private byte[] bytes;
-146  private int offset;
-147  private int length;
-148
-149  /**
-150   * Create a zero-size sequence.
-151   */
-152  public Bytes() {
-153    super();
-154  }
-155
-156  /**
-157   * Create a Bytes using the byte array as the initial value.
-158   * @param bytes This array becomes the backing storage for the object.
-159   */
-160  public Bytes(byte[] bytes) {
-161    this(bytes, 0, bytes.length);
-162  }
-163
-164  /**
-165   * Set the new Bytes to the contents of the passed
-166   * ibw.
-167   * @param ibw the value to set this Bytes to.
-168   */
-169  public Bytes(final Bytes ibw) {
-170    this(ibw.get(), ibw.getOffset(), ibw.getLength());
-171  }
-172
-173  /**
-174   * Set the value to a given byte range
-175   * @param bytes the new byte range to set to
-176   * @param offset the offset in newData to start at
-177   * @param length the number of bytes in the range
-178   */
-179  public Bytes(final byte[] bytes, final int offset,
-180      final int length) {
-181    this.bytes = bytes;
-182    this.offset = offset;
-183    this.length = length;
-184  }
-185
-186  /**
-187   * Copy bytes from ByteString instance.
-188   * @param byteString copy from
-189   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
-190   */
-191  @Deprecated
-192  public Bytes(final ByteString byteString) {
-193    this(byteString.toByteArray());
-194  }
-195
-196  /**
-197   * Get the data from the Bytes.
-198   * @return The data is only valid between offset and offset+length.
-199   */
-200  public byte [] get() {
-201    if (this.bytes == null) {
-202      throw new IllegalStateException("Uninitialiized. Null constructor " +
-203          "called w/o accompaying readFields invocation");
-204    }
-205    return this.bytes;
-206  }
-207
-208  /**
-209   * @param b Use passed bytes as backing array for this instance.
-210   */
-211  public void set(final byte [] b) {
-212    set(b, 0, b.length);
-213  }
-214
-215  /**
-216   * @param b Use passed bytes as backing array for this instance.
-217   * @param offset
-218   * @param length
-219   */
-220  public void set(final byte [] b, final int offset, final int length) {
-221    this.bytes = b;
-222    this.offset = offset;
-223    this.length = length;
-224  }
-225
-226  /**
-227   * @return the number of valid bytes in the buffer
-228   * @deprecated use {@link #getLength()} instead
-229   */
-230  @Deprecated
-231  public int getSize() {
-232    if (this.bytes == null) {
-233      throw new IllegalStateException("Uninitialiized. Null constructor " +
-234          "called w/o accompaying readFields invocation");
-235    }
-236    return this.length;
-237  }
-238
-239  /**
-240   * @return the number of valid bytes in the buffer
-241   */
-242  public int getLength() {
-243    if (this.bytes == null) {
-244      throw new IllegalStateException("Uninitialiized. Null constructor " +
-245          "called w/o accompaying readFields invocation");
-246    }
-247    return this.length;
-248  }
-249
-250  /**
-251   * @return offset
-252   */
-253  public int getOffset(){
-254    return this.offset;
-255  }
-256
-257  /**
-258   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
-259   */
-260  @Deprecated
-261  public ByteString toByteString() {
-262    return ByteString.copyFrom(this.bytes, this.offset, this.length);
-263  }
-264
-265  @Override
-266  public int hashCode() {
-267    return Bytes.hashCode(bytes, offset, length);
-268  }
-269
-270  /**
-271   * Define the sort order of the Bytes.
-272   * @param that The other bytes writable
-273   * @return Positive if left is bigger than right, 0 if they are equal, and
-274   *         negative if left is smaller than right.
-275   */
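
For readers skimming this hunk, a small usage sketch of the Bytes wrapper whose members appear above. It is illustrative only; the values and variable names are not part of this commit, and it relies solely on the constructors and accessors shown in the hunk.

    // Sketch: wrap a sub-range of an existing array without copying it.
    byte[] backing = new byte[] { 0x01, 0x02, 0x03, 0x04 };
    Bytes slice = new Bytes(backing, 1, 2);      // valid range is {0x02, 0x03}
    byte[] raw = slice.get();                    // returns the full backing array
    int offset = slice.getOffset();              // 1
    int length = slice.getLength();              // 2
    // compareTo applies the defined sort order to the wrapped ranges only.
    Bytes other = new Bytes(new byte[] { 0x02, 0x03 });
    int order = slice.compareTo(other);          // 0, the ranges hold equal bytes

Note that set(...) re-points the backing array (this.bytes = b) rather than copying, so callers that need isolation should copy first.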

[17/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 8cc5add..34858d6 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -2188,1428 +2188,1428 @@
 2180  }
 2181
 2182  @Override
-2183  public void postOpenDeployTasks(final PostOpenDeployContext context)
-2184      throws KeeperException, IOException {
-2185    HRegion r = context.getRegion();
-2186    long masterSystemTime = context.getMasterSystemTime();
-2187    rpcServices.checkOpen();
-2188    LOG.info("Post open deploy tasks for " + r.getRegionInfo().getRegionNameAsString());
-2189    // Do checks to see if we need to compact (references or too many files)
-2190    for (HStore s : r.stores.values()) {
-2191      if (s.hasReferences() || s.needsCompaction()) {
-2192        this.compactSplitThread.requestSystemCompaction(r, s, "Opening Region");
-2193      }
-2194    }
-2195    long openSeqNum = r.getOpenSeqNum();
-2196    if (openSeqNum == HConstants.NO_SEQNUM) {
-2197      // If we opened a region, we should have read some sequence number from it.
-2198      LOG.error("No sequence number found when opening " +
-2199        r.getRegionInfo().getRegionNameAsString());
-2200      openSeqNum = 0;
-2201    }
-2202
-2203    // Notify master
-2204    if (!reportRegionStateTransition(new RegionStateTransitionContext(
-2205        TransitionCode.OPENED, openSeqNum, masterSystemTime, r.getRegionInfo()))) {
-2206      throw new IOException("Failed to report opened region to master: "
-2207        + r.getRegionInfo().getRegionNameAsString());
-2208    }
-2209
-2210    triggerFlushInPrimaryRegion(r);
-2211
-2212    LOG.debug("Finished post open deploy task for " + r.getRegionInfo().getRegionNameAsString());
-2213  }
-2214
-2215  @Override
-2216  public boolean reportRegionStateTransition(final RegionStateTransitionContext context) {
-2217    TransitionCode code = context.getCode();
-2218    long openSeqNum = context.getOpenSeqNum();
-2219    long masterSystemTime = context.getMasterSystemTime();
-2220    RegionInfo[] hris = context.getHris();
-2221
-2222    if (TEST_SKIP_REPORTING_TRANSITION) {
-2223      // This is for testing only in case there is no master
-2224      // to handle the region transition report at all.
-2225      if (code == TransitionCode.OPENED) {
-2226        Preconditions.checkArgument(hris != null && hris.length == 1);
-2227        if (hris[0].isMetaRegion()) {
-2228          try {
-2229            MetaTableLocator.setMetaLocation(getZooKeeper(), serverName,
-2230              hris[0].getReplicaId(), State.OPEN);
-2231          } catch (KeeperException e) {
-2232            LOG.info("Failed to update meta location", e);
-2233            return false;
-2234          }
-2235        } else {
-2236          try {
-2237            MetaTableAccessor.updateRegionLocation(clusterConnection,
-2238              hris[0], serverName, openSeqNum, masterSystemTime);
-2239          } catch (IOException e) {
-2240            LOG.info("Failed to update meta", e);
-2241            return false;
-2242          }
-2243        }
-2244      }
-2245      return true;
-2246    }
-2247
-2248    ReportRegionStateTransitionRequest.Builder builder =
-2249      ReportRegionStateTransitionRequest.newBuilder();
-2250    builder.setServer(ProtobufUtil.toServerName(serverName));
-2251    RegionStateTransition.Builder transition = builder.addTransitionBuilder();
-2252    transition.setTransitionCode(code);
-2253    if (code == TransitionCode.OPENED && openSeqNum >= 0) {
-2254      transition.setOpenSeqNum(openSeqNum);
-2255    }
-2256    for (RegionInfo hri: hris) {
-2257      transition.addRegionInfo(ProtobufUtil.toRegionInfo(hri));
-2258    }
-2259    ReportRegionStateTransitionRequest request = builder.build();
-2260    int tries = 0;
-2261    long pauseTime = INIT_PAUSE_TIME_MS;
-2262    // Keep looping till we get an error. We want to send reports even though server is going down.
-2263    // Only go down if clusterConnection is null. It is set to null almost as last thing as the
-2264    // HRegionServer does down.
-2265    while (this.clusterConnection != null && !this.clusterConnection.isClosed()) {
-2266      RegionServerStatusService.BlockingInterface rss = rssStub;
-2267      try {
-2268        if (rss == null) {
-2269          createRegionServerStatusStub();
-2270          continue;
-2271        }
-2272        ReportRegionStateTransitionResponse response =
-2273

[49/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
index 804ef45..e999ddb 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html

[18/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 8cc5add..34858d6 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html

[33/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/handler/AssignRegionHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/handler/AssignRegionHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/AssignRegionHandler.html
new file mode 100644
index 000..2ba4882
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/AssignRegionHandler.html
@@ -0,0 +1,471 @@
+org.apache.hadoop.hbase.regionserver.handler
+Class AssignRegionHandler
+
+java.lang.Object
+  org.apache.hadoop.hbase.executor.EventHandler
+    org.apache.hadoop.hbase.regionserver.handler.AssignRegionHandler
+
+All Implemented Interfaces:
+Comparable<Runnable>, Runnable
+
+@InterfaceAudience.Private
+public class AssignRegionHandler
+extends EventHandler
+
+Handles opening of a region on a region server.
+
+ Just done the same thing with the old OpenRegionHandler, with some modifications on fencing
+ and retrying. But we need to keep the OpenRegionHandler as is to keep compatible with the
+ zk less assignment for 1.x, otherwise it is not possible to do rolling upgrade.
+
+Field Summary
+
+Modifier and Type                Field and Description
+private static org.slf4j.Logger  LOG
+private long                     masterSystemTime
+private RegionInfo               regionInfo
+private TableDescriptor          tableDesc
+
+Fields inherited from class org.apache.hadoop.hbase.executor.EventHandler
+eventType, seqids, server, waitingTimeForEvents
+
+Constructor Summary
+
+AssignRegionHandler(RegionServerServices server,
+                    RegionInfo regionInfo,
+                    TableDescriptor tableDesc,
+                    long masterSystemTime,
+                    EventType eventType)
+
+Method Summary
+
+Modifier and Type             Method and Description
+private void                  cleanUpAndReportFailure(IOException error)
+static AssignRegionHandler    create(RegionServerServices server,
+                                  RegionInfo regionInfo,
+                                  TableDescriptor tableDesc,
+                                  long masterSystemTime)
+private RegionServerServices  getServer()
+protected void                handleException(Throwable t)
+                                  Event exception handler, may be overridden
+void                          process()
+                                  This method is the main processing loop to be implemented
+                                  by the various subclasses.
+
+Methods inherited from class org.apache.hadoop.hbase.executor.EventHandler
+compareTo, getEventType, getInformativeName, getPriority, getSeqid, prepare, run, toString
+
+Methods inherited from class java.lang.Object
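
To make the flow concrete, a minimal sketch of how a region-server-side caller might queue this handler. The submitOpen method and the executorService variable are assumptions for illustration (they are not shown on this page); the create(...) signature is the one from the method summary above, and ExecutorService.submit(EventHandler) is the generic executor entry point.

    // Sketch only: submitOpen and its parameters are hypothetical glue code.
    void submitOpen(RegionServerServices server, ExecutorService executorService,
        RegionInfo regionInfo, TableDescriptor tableDesc, long masterSystemTime) {
      AssignRegionHandler handler =
        AssignRegionHandler.create(server, regionInfo, tableDesc, masterSystemTime);
      // process() performs the actual open; see the class description above.
      executorService.submit(handler);
    }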

[47/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
index 804ef45..e999ddb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html

[07/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
index acc491f..e6c6561 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html
@@ -26,256 +26,255 @@
 018 */
 019package org.apache.hadoop.hbase.regionserver;
 020
-021import java.io.IOException;
-022import java.util.Collection;
-023import java.util.List;
-024import java.util.Map.Entry;
-025import java.util.concurrent.ConcurrentMap;
-026
+021import com.google.protobuf.Service;
+022import java.io.IOException;
+023import java.util.Collection;
+024import java.util.List;
+025import java.util.Map.Entry;
+026import java.util.concurrent.ConcurrentMap;
 027import org.apache.hadoop.hbase.Abortable;
 028import org.apache.hadoop.hbase.Server;
-029import org.apache.hadoop.hbase.TableName;
-030import org.apache.hadoop.hbase.client.RegionInfo;
-031import org.apache.hadoop.hbase.client.locking.EntityLock;
-032import org.apache.hadoop.hbase.executor.ExecutorService;
-033import org.apache.hadoop.hbase.ipc.RpcServerInterface;
-034import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-035import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-036import org.apache.hadoop.hbase.quotas.RegionSizeStore;
-037import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequester;
-038import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-039import org.apache.hadoop.hbase.wal.WAL;
-040import org.apache.yetus.audience.InterfaceAudience;
-041import org.apache.zookeeper.KeeperException;
+029import org.apache.hadoop.hbase.TableDescriptors;
+030import org.apache.hadoop.hbase.TableName;
+031import org.apache.hadoop.hbase.client.RegionInfo;
+032import org.apache.hadoop.hbase.client.locking.EntityLock;
+033import org.apache.hadoop.hbase.executor.ExecutorService;
+034import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+035import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
+036import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
+037import org.apache.hadoop.hbase.quotas.RegionSizeStore;
+038import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequester;
+039import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
+040import org.apache.hadoop.hbase.wal.WAL;
+041import org.apache.yetus.audience.InterfaceAudience;
 042
 043import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
 044
-045import com.google.protobuf.Service;
-046
-047/**
-048 * A curated subset of services provided by {@link HRegionServer}.
-049 * For use internally only. Passed to Managers, Services and Chores so can pass less-than-a
-050 * full-on HRegionServer at test-time. Be judicious adding API. Changes cause ripples through
-051 * the code base.
-052 */
-053@InterfaceAudience.Private
-054public interface RegionServerServices extends Server, MutableOnlineRegions, FavoredNodesForRegion {
-055
-056  /** @return the WAL for a particular region. Pass null for getting the
-057   * default (common) WAL */
-058  WAL getWAL(RegionInfo regionInfo) throws IOException;
-059
-060  /** @return the List of WALs that are used by this server
-061   *  Doesn't include the meta WAL
-062   */
-063  List<WAL> getWALs() throws IOException;
-064
-065  /**
-066   * @return Implementation of {@link FlushRequester} or null. Usually it will not be null unless
-067   * during intialization.
-068   */
-069  FlushRequester getFlushRequester();
-070
-071  /**
-072   * @return Implementation of {@link CompactionRequester} or null. Usually it will not be null
-073   * unless during intialization.
-074   */
-075  CompactionRequester getCompactionRequestor();
-076
-077  /**
-078   * @return the RegionServerAccounting for this Region Server
-079   */
-080  RegionServerAccounting getRegionServerAccounting();
-081
-082  /**
-083   * @return RegionServer's instance of {@link RegionServerRpcQuotaManager}
-084   */
-085  RegionServerRpcQuotaManager getRegionServerRpcQuotaManager();
-086
-087  /**
-088   * @return RegionServer's instance of {@link SecureBulkLoadManager}
-089   */
-090  SecureBulkLoadManager getSecureBulkLoadManager();
-091
-092  /**
-093   * @return RegionServer's instance of {@link RegionServerSpaceQuotaManager}
-094   */
-095  RegionServerSpaceQuotaManager getRegionServerSpaceQuotaManager();
-096
-097  /**
-098   * Context
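
As a reading aid, a small consumer-side sketch of a few accessors declared above. The method itself and its variable names are invented for illustration and assume the usual org.apache.hadoop.hbase imports; it only calls methods shown in this hunk.

    // Sketch only: exercises some of the getters from RegionServerServices.
    int countWals(RegionServerServices services, RegionInfo regionInfo) throws IOException {
      WAL regionWal = services.getWAL(regionInfo);   // pass null to get the default (common) WAL
      List<WAL> wals = services.getWALs();           // does not include the meta WAL
      FlushRequester flushRequester = services.getFlushRequester();
      if (flushRequester == null) {
        // Per the javadoc above, accessors like this can be null during initialization.
        return 0;
      }
      return wals.size() + (regionWal != null ? 1 : 0);
    }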

[42/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index d48be69..5c7ab7f 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -9271,6 +9271,20 @@ service.
 
 
 
+
+Method parameters in org.apache.hadoop.hbase.regionserver with type arguments of type TableName
+
+Modifier and Type    Method and Description
+
+private void
+RSRpcServices.executeOpenRegionProcedures(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest request,
+                                          Map<TableName, TableDescriptor> tdCache)
+
 
 Constructors in org.apache.hadoop.hbase.regionserver with parameters of type TableName
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
index 0473899..6704aac 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
@@ -5383,10 +5383,16 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 private RegionInfo
-OpenRegionHandler.regionInfo
+AssignRegionHandler.regionInfo
 
 private RegionInfo
+OpenRegionHandler.regionInfo
+Deprecated.
+
+private RegionInfo
 CloseRegionHandler.regionInfo
 
@@ -5400,7 +5406,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 RegionInfo
-OpenRegionHandler.getRegionInfo()
+OpenRegionHandler.getRegionInfo()
+Deprecated.
 
 RegionInfo
@@ -5416,9 +5424,18 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
+static AssignRegionHandler
+AssignRegionHandler.create(RegionServerServices server,
+                           RegionInfo regionInfo,
+                           TableDescriptor tableDesc,
+                           long masterSystemTime)
+
 private static boolean
 OpenRegionHandler.isRegionStillOpening(RegionInfo regionInfo,
-                                      RegionServerServices rsServices)
+                                      RegionServerServices rsServices)
+Deprecated.
 
@@ -5429,12 +5446,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
+AssignRegionHandler(RegionServerServices server,
+                    RegionInfo regionInfo,
+                    TableDescriptor tableDesc,
+                    long masterSystemTime,
+                    EventType eventType)
+
 CloseMetaHandler(Server server,
                  RegionServerServices rsServices,
                  RegionInfo regionInfo,
                  boolean abort)
 
 CloseRegionHandler(Server server,
                    RegionServerServices rsServices,
                    RegionInfo regionInfo,
@@ -5442,7 +5466,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
                    EventType eventType,
                    ServerName destination)
 
 CloseRegionHandler(Server server,
                    RegionServerServices rsServices,
                    RegionInfo regionInfo,
@@ -5451,34 +5475,38 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 This method used internally by the RegionServer to close out regions.
 
 OpenMetaHandler(Server server,
                 RegionServerServices rsServices,
                 RegionInfo regionInfo,
                 TableDescriptor htd,
                 long masterSystemTime)
 
 OpenPriorityRegionHandler(Server server,
                           RegionServerServices rsServices,
                           RegionInfo regionInfo,
                           TableDescriptor htd,
                           long masterSystemTime)
 
 OpenRegionHandler(Server server,
                   RegionServerServices rsServices,
                   RegionInfo regionInfo,
                   TableDescriptor htd,
-                  long masterSystemTime)
+                  long masterSystemTime)
+Deprecated.
 
 OpenRegionHandler(Server server,
                   RegionServerServices rsServices,
                   RegionInfo regionInfo,
                   TableDescriptor htd,
                   long masterSystemTime,
-                  EventType eventType)
+                  EventType eventType)
+Deprecated.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--

[29/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index a5d292d..d85465c 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -520,14 +520,14 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.util.PoolMap.PoolType
 org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
+org.apache.hadoop.hbase.util.ChecksumType
 org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
-org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer)
 org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 org.apache.hadoop.hbase.util.Order
+org.apache.hadoop.hbase.util.PoolMap.PoolType
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.UnsafeComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer)
-org.apache.hadoop.hbase.util.ChecksumType
+org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index c83ea97..4c1f36c 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -1500,6 +1500,7 @@
 org.apache.hadoop.hbase.util.EnvironmentEdgeManager
 org.apache.hadoop.hbase.executor.EventHandler (implements java.lang.Comparable, java.lang.Runnable)
 
+org.apache.hadoop.hbase.regionserver.handler.AssignRegionHandler
 org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler
 
 org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler
@@ -1521,6 +1522,7 @@
 org.apache.hadoop.hbase.master.snapshot.EnabledTableSnapshotHandler
 
 
+org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler
 org.apache.hadoop.hbase.regionserver.handler.WALSplitterHandler
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index af8aab0..f2bd10b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 008@InterfaceAudience.Private
 009public class Version {
 010  public static final String version = "3.0.0-SNAPSHOT";
-011  public static final String revision = "7ab77518a2569e2416a50020393aa386e7734501";
+011  public static final String revision = "8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1";
 012  public static final String user = "jenkins";
-013  public static final String date = "Sat Sep 22 14:42:20 UTC 2018";
+013  public static final String date = "Tue Sep 25 14:45:54 UTC 2018";
 014  public static final String url = "git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";
-015  public static final String srcChecksum = "40cfe40c509b5c9b75784418a7a88604";
+015  public static final String srcChecksum = "5bd47f87f2a094af440e01bd108f2b16";
 016}
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.Executor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.Executor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.Executor.html
index 49f081b..33c9cc0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.Executor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.Executor.html
@@ -35,309 +35,328 @@
 027import java.util.concurrent.BlockingQueue;
 028

[36/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
index 6d3503e..84c4071 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static interface RSRpcServices.LogDelegate
+static interface RSRpcServices.LogDelegate
 
@@ -151,7 +151,7 @@ var activeTableTab = "activeTableTab";
 
 logBatchWarning
-void logBatchWarning(String firstRegionName,
+void logBatchWarning(String firstRegionName,
                      int sum,
                      int rowSizeWarnThreshold)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
index 1a19bee..5d0e72a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class RSRpcServices.RegionScannerCloseCallBack
+private static final class RSRpcServices.RegionScannerCloseCallBack
 extends Object
 implements RpcCallback
 An Rpc callback for closing a RegionScanner.
@@ -209,7 +209,7 @@
 
 scanner
-private final RegionScanner scanner
+private final RegionScanner scanner
 
@@ -226,7 +226,7 @@
 
 RegionScannerCloseCallBack
-public RegionScannerCloseCallBack(RegionScanner scanner)
+public RegionScannerCloseCallBack(RegionScanner scanner)
 
@@ -243,7 +243,7 @@
 
 run
-public void run()
+public void run()
        throws IOException
 Description copied from interface: RpcCallback

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
index 0cdebfb..70b0bd0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class RSRpcServices.RegionScannerHolder
+private static final class RSRpcServices.RegionScannerHolder
 extends Object
 Holder class which holds the RegionScanner, nextCallSeq and RpcCallbacks together.
 
@@ -239,7 +239,7 @@
 
 nextCallSeq
-private final AtomicLong nextCallSeq
+private final AtomicLong nextCallSeq
 
@@ -248,7 +248,7 @@
 
 scannerName
-private final String scannerName
+private final String scannerName
 
@@ -257,7 +257,7 @@

[28/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.ExecutorStatus.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.ExecutorStatus.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.ExecutorStatus.html
index 49f081b..33c9cc0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.ExecutorStatus.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.ExecutorStatus.html
@@ -35,309 +35,328 @@
 027import java.util.concurrent.BlockingQueue;
 028import java.util.concurrent.ConcurrentHashMap;
 029import java.util.concurrent.ConcurrentMap;
-030import java.util.concurrent.LinkedBlockingQueue;
-031import java.util.concurrent.ThreadPoolExecutor;
-032import java.util.concurrent.TimeUnit;
-033import java.util.concurrent.atomic.AtomicLong;
-034
-035import org.apache.yetus.audience.InterfaceAudience;
-036import org.slf4j.Logger;
-037import org.slf4j.LoggerFactory;
-038import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
+030import java.util.concurrent.Executors;
+031import java.util.concurrent.LinkedBlockingQueue;
+032import java.util.concurrent.ThreadPoolExecutor;
+033import java.util.concurrent.TimeUnit;
+034import java.util.concurrent.atomic.AtomicLong;
+035import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
+036import org.apache.yetus.audience.InterfaceAudience;
+037import org.slf4j.Logger;
+038import org.slf4j.LoggerFactory;
 039
 040import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 041import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 042import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-043import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-044
-045/**
-046 * This is a generic executor service. This component abstracts a
-047 * threadpool, a queue to which {@link EventType}s can be submitted,
-048 * and a Runnable that handles the object that is added to the queue.
-049 *
-050 * <p>In order to create a new service, create an instance of this class and
-051 * then do: instance.startExecutorService("myService");. When done
-052 * call {@link #shutdown()}.
-053 *
-054 * <p>In order to use the service created above, call
-055 * {@link #submit(EventHandler)}.
-056 */
-057@InterfaceAudience.Private
-058public class ExecutorService {
-059  private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class);
-060
-061  // hold the all the executors created in a map addressable by their names
-062  private final ConcurrentHashMap executorMap = new ConcurrentHashMap<>();
+043import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
+044import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningScheduledExecutorService;
+045import org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
+046import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+047
+048/**
+049 * This is a generic executor service. This component abstracts a
+050 * threadpool, a queue to which {@link EventType}s can be submitted,
+051 * and a Runnable that handles the object that is added to the queue.
+052 *
+053 * <p>In order to create a new service, create an instance of this class and
+054 * then do: instance.startExecutorService("myService");. When done
+055 * call {@link #shutdown()}.
+056 *
+057 * <p>In order to use the service created above, call
+058 * {@link #submit(EventHandler)}.
+059 */
+060@InterfaceAudience.Private
+061public class ExecutorService {
+062  private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class);
 063
-064  // Name of the server hosting this executor service.
-065  private final String servername;
+064  // hold the all the executors created in a map addressable by their names
+065  private final ConcurrentHashMap executorMap = new ConcurrentHashMap<>();
 066
-067  /**
-068   * Default constructor.
-069   * @param servername Name of the hosting server.
-070   */
-071  public ExecutorService(final String servername) {
-072    super();
-073    this.servername = servername;
-074  }
-075
-076  /**
-077   * Start an executor service with a given name. If there was a service already
-078   * started with the same name, this throws a RuntimeException.
-079   * @param name Name of the service to start.
-080   */
-081  @VisibleForTesting
-082  public void startExecutorService(String name, int maxThreads) {
-083    if (this.executorMap.get(name) != null) {
-084      throw new RuntimeException("An executor service with the name " + name +
-085        " is already running

[50/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/apidocs/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/Bytes.html 
b/apidocs/org/apache/hadoop/hbase/util/Bytes.html
index b240e4c..ea6c069 100644
--- a/apidocs/org/apache/hadoop/hbase/util/Bytes.html
+++ b/apidocs/org/apache/hadoop/hbase/util/Bytes.html
@@ -1346,7 +1346,7 @@
 BYTES_COMPARATOR
 public static final Comparator<byte[]> BYTES_COMPARATOR
 Pass this to TreeMaps where byte [] are keys.
 
 BYTES_RAWCOMPARATOR
 public static final org.apache.hadoop.io.RawComparator<byte[]> BYTES_RAWCOMPARATOR
 Use comparing byte arrays, byte-by-byte
 
 Bytes
 public Bytes()
 Create a zero-size sequence.
 
 Bytes
 public Bytes(byte[] bytes)
 Create a Bytes using the byte array as the initial value.
 Parameters: bytes - This array becomes the backing storage for the object.
 
 Bytes
 public Bytes(Bytes ibw)
 Set the new Bytes to the contents of the passed ibw.
 
 Bytes
 public Bytes(byte[] bytes, int offset, int length)
 Set the value to a given byte range
 
 Bytes
 @Deprecated
 public Bytes(com.google.protobuf.ByteString byteString)
 Deprecated. As of release 2.0.0, this will be removed in HBase 3.0.0.
 Copy bytes from ByteString instance.
 
 len
 public static final int len(byte[] b)
 Returns length of the byte array, returning 0 if the array is null. Useful for calculating sizes.
 
 get
 public byte[] get()
 Get the data from the Bytes.
 
 set
 public void set(byte[] b)
 public void set(byte[] b, int offset, int length)
 
 getSize
 @Deprecated
 public int getSize()
 Deprecated. use getLength() instead
 Returns: the number of valid bytes in the buffer
 
 getLength
 public int getLength()
 Returns: the number of valid bytes in the buffer
 
 getOffset
 public int getOffset()
 Returns: offset
 
 toByteString
 @Deprecated
 public com.google.protobuf.ByteString toByteString()
 Deprecated. As of release 2.0.0, this will be removed in HBase 3.0.0.
 
 hashCode
 public int hashCode()
 Overrides: hashCode in class Object
 
 compareTo
 public int compareTo(Bytes that)
 Define the sort order of the Bytes.
 
 compareTo
 public int compareTo(byte[] that)
 Compares the bytes in this obj

[32/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
index c3ca84f..34a0791 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
new file mode 100644
index 000..2eb680c
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
@@ -0,0 +1,454 @@
+
+org.apache.hadoop.hbase.regionserver.handler
+Class 
UnassignRegionHandler
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.executor.EventHandler
+
+
+org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">ComparableRunnable>, https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
+
+
+
+@InterfaceAudience.Private
+public class UnassignRegionHandler
+extends EventHandler
+Handles closing of a region on a region server.
+ 
+ Just done the same thing with the old CloseRegionHandler,
 with some modifications on
+ fencing and retrying. But we need to keep the CloseRegionHandler
 as is to keep compatible
+ with the zk less assignment for 1.x, otherwise it is not possible to do 
rolling upgrade.
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields 
+
+Modifier and Type
+Field and Description
+
+
+private boolean
+abort 
+
+
+private ServerName
+destination 
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+encodedName 
+
+
+private static org.slf4j.Logger
+LOG 
+
+
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.executor.EventHandler
+eventType,
 seqids,
 server,
 waitingTimeForEvents
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+UnassignRegionHandler(RegionServerServices server,
+ https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String encodedName,
+ boolean abort,
+ ServerName destination,
+ EventType eventType) 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Static Methods Instance Methods Concrete Methods 
+
+Modifier and Type
+Method and Description
+
+
+static UnassignRegionHandler
+create(RegionServerServices server,
+  https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-ext
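
For orientation, the class description and the static create(...) factory above can be read as the usual close-side pattern: fence off concurrent closes, retry while the region is still opening, and report back to the master once the close succeeds. A rough self-contained sketch of that pattern follows; every type and method name in it is a simplified stand-in invented for illustration, not the real HBase API.

// Illustrative sketch only -- simplified stand-in types, not the HBase implementation.
final class UnassignHandlerSketch implements Runnable {

  /** Minimal view of the hosting server; every method here is a hypothetical stand-in. */
  interface RegionServer {
    Object getOnlineRegion(String encodedName);   // null if the region is not open here
    boolean markClosing(String encodedName);      // fencing flag; false if a close is already in flight
    void closeRegion(Object region, boolean abort) throws Exception;
    void reportClosedToMaster(String encodedName);
    void resubmit(Runnable handler);              // try again later, e.g. the region is still opening
  }

  private final RegionServer server;
  private final String encodedName;
  private final boolean abort;

  UnassignHandlerSketch(RegionServer server, String encodedName, boolean abort) {
    this.server = server;
    this.encodedName = encodedName;
    this.abort = abort;
  }

  @Override
  public void run() {
    Object region = server.getOnlineRegion(encodedName);
    if (region == null) {
      // Possibly still opening here: retry rather than failing the close request.
      server.resubmit(this);
      return;
    }
    if (!server.markClosing(encodedName)) {
      // Another close is already running; the fence prevents a double close.
      return;
    }
    try {
      server.closeRegion(region, abort);
      server.reportClosedToMaster(encodedName);
    } catch (Exception e) {
      throw new RuntimeException("Failed closing region " + encodedName, e);
    }
  }
}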

[19/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 8cc5add..34858d6 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -2188,1428 +2188,1428 @@
   }

   @Override
-  public void postOpenDeployTasks(final PostOpenDeployContext context)
-      throws KeeperException, IOException {
-    HRegion r = context.getRegion();
-    long masterSystemTime = context.getMasterSystemTime();
-    rpcServices.checkOpen();
-    LOG.info("Post open deploy tasks for " + r.getRegionInfo().getRegionNameAsString());
-    // Do checks to see if we need to compact (references or too many files)
-    for (HStore s : r.stores.values()) {
-      if (s.hasReferences() || s.needsCompaction()) {
-        this.compactSplitThread.requestSystemCompaction(r, s, "Opening Region");
-      }
-    }
-    long openSeqNum = r.getOpenSeqNum();
-    if (openSeqNum == HConstants.NO_SEQNUM) {
-      // If we opened a region, we should have read some sequence number from it.
-      LOG.error("No sequence number found when opening " +
-        r.getRegionInfo().getRegionNameAsString());
-      openSeqNum = 0;
-    }
-
-    // Notify master
-    if (!reportRegionStateTransition(new RegionStateTransitionContext(
-        TransitionCode.OPENED, openSeqNum, masterSystemTime, r.getRegionInfo()))) {
-      throw new IOException("Failed to report opened region to master: "
-        + r.getRegionInfo().getRegionNameAsString());
-    }
-
-    triggerFlushInPrimaryRegion(r);
-
-    LOG.debug("Finished post open deploy task for " + r.getRegionInfo().getRegionNameAsString());
-  }
-
-  @Override
-  public boolean reportRegionStateTransition(final RegionStateTransitionContext context) {
-    TransitionCode code = context.getCode();
-    long openSeqNum = context.getOpenSeqNum();
-    long masterSystemTime = context.getMasterSystemTime();
-    RegionInfo[] hris = context.getHris();
-
-    if (TEST_SKIP_REPORTING_TRANSITION) {
-      // This is for testing only in case there is no master
-      // to handle the region transition report at all.
-      if (code == TransitionCode.OPENED) {
-        Preconditions.checkArgument(hris != null && hris.length == 1);
-        if (hris[0].isMetaRegion()) {
-          try {
-            MetaTableLocator.setMetaLocation(getZooKeeper(), serverName,
-              hris[0].getReplicaId(), State.OPEN);
-          } catch (KeeperException e) {
-            LOG.info("Failed to update meta location", e);
-            return false;
-          }
-        } else {
-          try {
-            MetaTableAccessor.updateRegionLocation(clusterConnection,
-              hris[0], serverName, openSeqNum, masterSystemTime);
-          } catch (IOException e) {
-            LOG.info("Failed to update meta", e);
-            return false;
-          }
-        }
-      }
-      return true;
-    }
-
-    ReportRegionStateTransitionRequest.Builder builder =
-      ReportRegionStateTransitionRequest.newBuilder();
-    builder.setServer(ProtobufUtil.toServerName(serverName));
-    RegionStateTransition.Builder transition = builder.addTransitionBuilder();
-    transition.setTransitionCode(code);
-    if (code == TransitionCode.OPENED && openSeqNum >= 0) {
-      transition.setOpenSeqNum(openSeqNum);
-    }
-    for (RegionInfo hri : hris) {
-      transition.addRegionInfo(ProtobufUtil.toRegionInfo(hri));
-    }
-    ReportRegionStateTransitionRequest request = builder.build();
-    int tries = 0;
-    long pauseTime = INIT_PAUSE_TIME_MS;
-    // Keep looping till we get an error. We want to send reports even though server is going down.
-    // Only go down if clusterConnection is null. It is set to null almost as last thing as the
-    // HRegionServer does down.
-    while (this.clusterConnection != null && !this.clusterConnection.isClosed()) {
-      RegionServerStatusService.BlockingInterface rss = rssStub;
-      try {
-        if (rss == null) {
-          createRegionServerStatusStub();
-          continue;
-        }
-        ReportRegionStateTransitionResponse response =
-          rs

[48/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
index 804ef45..e999ddb 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
@@ -138,2491 +138,2492 @@
 130  // SizeOf which uses 
java.lang.instrument says 24 bytes. (3 longs?)
 131  public static final int 
ESTIMATED_HEAP_TAX = 16;
 132
-133  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-134
-135  /**
-136   * Returns length of the byte array, 
returning 0 if the array is null.
-137   * Useful for calculating sizes.
-138   * @param b byte array, which can be 
null
-139   * @return 0 if b is null, otherwise 
returns length
-140   */
-141  final public static int len(byte[] b) 
{
-142return b == null ? 0 : b.length;
-143  }
-144
-145  private byte[] bytes;
-146  private int offset;
-147  private int length;
-148
-149  /**
-150   * Create a zero-size sequence.
-151   */
-152  public Bytes() {
-153super();
-154  }
-155
-156  /**
-157   * Create a Bytes using the byte array 
as the initial value.
-158   * @param bytes This array becomes the 
backing storage for the object.
-159   */
-160  public Bytes(byte[] bytes) {
-161this(bytes, 0, bytes.length);
-162  }
-163
-164  /**
-165   * Set the new Bytes to the contents of 
the passed
-166   * ibw.
-167   * @param ibw the value to set this 
Bytes to.
-168   */
-169  public Bytes(final Bytes ibw) {
-170this(ibw.get(), ibw.getOffset(), 
ibw.getLength());
-171  }
-172
-173  /**
-174   * Set the value to a given byte 
range
-175   * @param bytes the new byte range to 
set to
-176   * @param offset the offset in newData 
to start at
-177   * @param length the number of bytes in 
the range
-178   */
-179  public Bytes(final byte[] bytes, final 
int offset,
-180  final int length) {
-181this.bytes = bytes;
-182this.offset = offset;
-183this.length = length;
-184  }
-185
-186  /**
-187   * Copy bytes from ByteString 
instance.
-188   * @param byteString copy from
-189   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-190   */
-191  @Deprecated
-192  public Bytes(final ByteString 
byteString) {
-193this(byteString.toByteArray());
-194  }
-195
-196  /**
-197   * Get the data from the Bytes.
-198   * @return The data is only valid 
between offset and offset+length.
-199   */
-200  public byte [] get() {
-201if (this.bytes == null) {
-202  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-203  "called w/o accompaying 
readFields invocation");
-204}
-205return this.bytes;
-206  }
-207
-208  /**
-209   * @param b Use passed bytes as backing 
array for this instance.
-210   */
-211  public void set(final byte [] b) {
-212set(b, 0, b.length);
-213  }
-214
-215  /**
-216   * @param b Use passed bytes as backing 
array for this instance.
-217   * @param offset
-218   * @param length
-219   */
-220  public void set(final byte [] b, final 
int offset, final int length) {
-221this.bytes = b;
-222this.offset = offset;
-223this.length = length;
-224  }
-225
-226  /**
-227   * @return the number of valid bytes in 
the buffer
-228   * @deprecated use {@link #getLength()} 
instead
-229   */
-230  @Deprecated
-231  public int getSize() {
-232if (this.bytes == null) {
-233  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-234  "called w/o accompaying 
readFields invocation");
-235}
-236return this.length;
-237  }
-238
-239  /**
-240   * @return the number of valid bytes in 
the buffer
-241   */
-242  public int getLength() {
-243if (this.bytes == null) {
-244  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-245  "called w/o accompaying 
readFields invocation");
-246}
-247return this.length;
-248  }
-249
-250  /**
-251   * @return offset
-252   */
-253  public int getOffset(){
-254return this.offset;
-255  }
-256
-257  /**
-258   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-259   */
-260  @Deprecated
-261  public ByteString toByteString() {
-262return 
ByteString.copyFrom(this.bytes, this.offset, this.length);
-263  }
-264
-265  @Override
-266  public int hashCode() {
-267return Bytes.hashCode(bytes, offset, 
length);
-268  }
-269
-270  /**
-271   * Define the sort order of the 
Bytes.
-272   * @param that The other bytes 
writable
-273   * @return Positive if left is bigger 
than right, 0 if they are equal, and
-274   * negative if left is smaller 
than right.
-275   */
-276  @Override

[27/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.RunningEventStatus.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.RunningEventStatus.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.RunningEventStatus.html
index 49f081b..33c9cc0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.RunningEventStatus.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.RunningEventStatus.html
@@ -35,309 +35,328 @@
(this hunk is identical to the one shown below for devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.html)


[14/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
index 25f458d..20e3eaa 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
@@ -28,3711 +28,3756 @@
 020import java.io.FileNotFoundException;
 021import java.io.IOException;
 022import java.io.InterruptedIOException;
-023import 
java.lang.reflect.InvocationTargetException;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Collections;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Map.Entry;
-036import java.util.NavigableMap;
-037import java.util.Set;
-038import java.util.TreeSet;
-039import 
java.util.concurrent.ConcurrentHashMap;
-040import 
java.util.concurrent.ConcurrentMap;
-041import java.util.concurrent.TimeUnit;
-042import 
java.util.concurrent.atomic.AtomicBoolean;
-043import 
java.util.concurrent.atomic.AtomicLong;
-044import 
java.util.concurrent.atomic.LongAdder;
-045import 
org.apache.commons.lang3.mutable.MutableObject;
-046import 
org.apache.hadoop.conf.Configuration;
-047import org.apache.hadoop.fs.Path;
-048import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-049import 
org.apache.hadoop.hbase.CacheEvictionStats;
-050import 
org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.Cell;
-052import 
org.apache.hadoop.hbase.CellScannable;
-053import 
org.apache.hadoop.hbase.CellScanner;
-054import 
org.apache.hadoop.hbase.CellUtil;
-055import 
org.apache.hadoop.hbase.CompareOperator;
-056import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-057import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-058import 
org.apache.hadoop.hbase.HBaseIOException;
-059import 
org.apache.hadoop.hbase.HConstants;
-060import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-061import 
org.apache.hadoop.hbase.NotServingRegionException;
-062import 
org.apache.hadoop.hbase.PrivateCellUtil;
-063import 
org.apache.hadoop.hbase.RegionTooBusyException;
-064import org.apache.hadoop.hbase.Server;
-065import 
org.apache.hadoop.hbase.ServerName;
-066import 
org.apache.hadoop.hbase.TableName;
-067import 
org.apache.hadoop.hbase.UnknownScannerException;
-068import 
org.apache.hadoop.hbase.client.Append;
-069import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-070import 
org.apache.hadoop.hbase.client.Delete;
-071import 
org.apache.hadoop.hbase.client.Durability;
-072import 
org.apache.hadoop.hbase.client.Get;
-073import 
org.apache.hadoop.hbase.client.Increment;
-074import 
org.apache.hadoop.hbase.client.Mutation;
-075import 
org.apache.hadoop.hbase.client.Put;
-076import 
org.apache.hadoop.hbase.client.RegionInfo;
-077import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-078import 
org.apache.hadoop.hbase.client.Result;
-079import 
org.apache.hadoop.hbase.client.Row;
-080import 
org.apache.hadoop.hbase.client.RowMutations;
-081import 
org.apache.hadoop.hbase.client.Scan;
-082import 
org.apache.hadoop.hbase.client.TableDescriptor;
-083import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-084import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-085import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-086import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-087import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-088import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-089import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-090import 
org.apache.hadoop.hbase.io.TimeRange;
-091import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-092import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-093import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-094import 
org.apache.hadoop.hbase.ipc.QosPriority;
-095import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-096import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-097import 
org.apache.hadoop.hbase.ipc.RpcScheduler;
-098import 
org.apache.hadoop.hbase.ipc.RpcServer;
-099import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-100import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-101import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-104import 
org.apache.hadoop.hbase.log.HBas

[25/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.html
index 49f081b..33c9cc0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/executor/ExecutorService.html
@@ -35,309 +35,328 @@
 027import 
java.util.concurrent.BlockingQueue;
 028import 
java.util.concurrent.ConcurrentHashMap;
 029import 
java.util.concurrent.ConcurrentMap;
-030import 
java.util.concurrent.LinkedBlockingQueue;
-031import 
java.util.concurrent.ThreadPoolExecutor;
-032import java.util.concurrent.TimeUnit;
-033import 
java.util.concurrent.atomic.AtomicLong;
-034
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036import org.slf4j.Logger;
-037import org.slf4j.LoggerFactory;
-038import 
org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
+030import java.util.concurrent.Executors;
+031import 
java.util.concurrent.LinkedBlockingQueue;
+032import 
java.util.concurrent.ThreadPoolExecutor;
+033import java.util.concurrent.TimeUnit;
+034import 
java.util.concurrent.atomic.AtomicLong;
+035import 
org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
+036import 
org.apache.yetus.audience.InterfaceAudience;
+037import org.slf4j.Logger;
+038import org.slf4j.LoggerFactory;
 039
 040import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 041import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 042import 
org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-043import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-044
-045/**
-046 * This is a generic executor service. 
This component abstracts a
-047 * threadpool, a queue to which {@link 
EventType}s can be submitted,
-048 * and a 
Runnable that handles the object that is added to the 
queue.
-049 *
-050 * 

In order to create a new service, create an instance of this class and -051 * then do: instance.startExecutorService("myService");. When done -052 * call {@link #shutdown()}. -053 * -054 *

In order to use the service created above, call -055 * {@link #submit(EventHandler)}. -056 */ -057@InterfaceAudience.Private -058public class ExecutorService { -059 private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class); -060 -061 // hold the all the executors created in a map addressable by their names -062 private final ConcurrentHashMap executorMap = new ConcurrentHashMap<>(); +043import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture; +044import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningScheduledExecutorService; +045import org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors; +046import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; +047 +048/** +049 * This is a generic executor service. This component abstracts a +050 * threadpool, a queue to which {@link EventType}s can be submitted, +051 * and a Runnable that handles the object that is added to the queue. +052 * +053 *

In order to create a new service, create an instance of this class and +054 * then do: instance.startExecutorService("myService");. When done +055 * call {@link #shutdown()}. +056 * +057 *

In order to use the service created above, call +058 * {@link #submit(EventHandler)}. +059 */ +060@InterfaceAudience.Private +061public class ExecutorService { +062 private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class); 063 -064 // Name of the server hosting this executor service. -065 private final String servername; +064 // hold the all the executors created in a map addressable by their names +065 private final ConcurrentHashMap executorMap = new ConcurrentHashMap<>(); 066 -067 /** -068 * Default constructor. -069 * @param servername Name of the hosting server. -070 */ -071 public ExecutorService(final String servername) { -072super(); -073this.servername = servername; -074 } -075 -076 /** -077 * Start an executor service with a given name. If there was a service already -078 * started with the same name, this throws a RuntimeException. -079 * @param name Name of the service to start. -080 */ -081 @VisibleForTesting -082 public void startExecutorService(String name, int maxThreads) { -083if (this.executorMap.get(name) != null) { -084 throw new RuntimeException("An executor service with the name " + name + -085" is already running!"); -086} -087Executor hbes = new Executor(name, maxThreads); -088


hbase git commit: HBASE-21221 Ineffective assertion in TestFromClientSide3#testMultiRowMutations

2018-09-25 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 8eaaa6311 -> b8134fe5b


HBASE-21221 Ineffective assertion in TestFromClientSide3#testMultiRowMutations


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b8134fe5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b8134fe5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b8134fe5

Branch: refs/heads/master
Commit: b8134fe5b6ce4a411c79d5bd63404906bf174c3d
Parents: 8eaaa63
Author: tedyu 
Authored: Tue Sep 25 09:17:26 2018 -0700
Committer: tedyu 
Committed: Tue Sep 25 09:17:26 2018 -0700

--
 .../hbase/client/TestFromClientSide3.java   | 21 +++-
 1 file changed, 12 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b8134fe5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index fad4f45..aa5bfb0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -35,6 +35,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
@@ -829,8 +830,8 @@ public class TestFromClientSide3 {
 }
   });
   ExecutorService cpService = Executors.newSingleThreadExecutor();
+  AtomicBoolean exceptionDuringMutateRows = new AtomicBoolean();
   cpService.execute(() -> {
-boolean threw;
 Put put1 = new Put(row);
 Put put2 = new Put(rowLocked);
 put1.addColumn(FAMILY, QUALIFIER, value1);
@@ -844,26 +845,25 @@ public class TestFromClientSide3 {
   
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,
 put2))
   .build();
   
table.coprocessorService(MultiRowMutationProtos.MultiRowMutationService.class,
-ROW, ROW,
+  ROW, ROW,
 (MultiRowMutationProtos.MultiRowMutationService exe) -> {
   ServerRpcController controller = new ServerRpcController();
   
CoprocessorRpcUtils.BlockingRpcCallback
 rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
   exe.mutateRows(controller, request, rpcCallback);
+  if (controller.failedOnException()) {
+exceptionDuringMutateRows.set(true);
+  }
   return rpcCallback.get();
 });
-  threw = false;
 } catch (Throwable ex) {
-  threw = true;
-}
-if (!threw) {
-  // Can't call fail() earlier because the catch would eat it.
-  fail("This cp should fail because the target lock is blocked by 
previous put");
+  LOG.error("encountered " + ex);
 }
   });
   cpService.shutdown();
   cpService.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
-  WaitingForMultiMutationsObserver observer = find(tableName, 
WaitingForMultiMutationsObserver.class);
+  WaitingForMultiMutationsObserver observer = find(tableName,
+  WaitingForMultiMutationsObserver.class);
   observer.latch.countDown();
   putService.shutdown();
   putService.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
@@ -877,6 +877,9 @@ public class TestFromClientSide3 {
 assertTrue(Bytes.equals(r1.getValue(FAMILY, QUALIFIER), value0));
   }
   assertNoLocks(tableName);
+  if (!exceptionDuringMutateRows.get()) {
+fail("This cp should fail because the target lock is blocked by 
previous put");
+  }
 }
   }
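
The change above records the coprocessor failure in an AtomicBoolean inside the executor task and moves the fail(...) call to the test thread, after awaitTermination, where JUnit can actually see it; an assertion thrown inside the pooled task would only terminate that task. A generic self-contained sketch of this record-then-assert pattern (plain JUnit 4 and java.util.concurrent, not the HBase test itself; doCallThatShouldFail is a hypothetical stand-in):

import static org.junit.Assert.fail;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Test;

// Illustrative sketch only: assert on the test thread about work done on an executor thread.
public class RecordThenAssertExample {

  @Test
  public void testBackgroundFailureIsVisible() throws Exception {
    AtomicBoolean sawExpectedFailure = new AtomicBoolean();
    ExecutorService pool = Executors.newSingleThreadExecutor();
    pool.execute(() -> {
      try {
        doCallThatShouldFail();           // hypothetical call under test
      } catch (Exception expected) {
        sawExpectedFailure.set(true);     // record instead of calling fail() on this thread
      }
    });
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);
    // Assertions made here, on the main thread, are the ones JUnit actually reports.
    if (!sawExpectedFailure.get()) {
      fail("expected the background call to fail");
    }
  }

  private void doCallThatShouldFail() throws Exception {
    throw new Exception("simulated failure");
  }
}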
 



hbase git commit: HBASE-21223 [amv2] Remove abort_procedure from shell

2018-09-25 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 101205345 -> 0d008b479


HBASE-21223 [amv2] Remove abort_procedure from shell

Signed-off-by: Balazs Meszaros 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0d008b47
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0d008b47
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0d008b47

Branch: refs/heads/branch-2.1
Commit: 0d008b47925e99a71f60def73645c798430b9778
Parents: 1012053
Author: Michael Stack 
Authored: Tue Sep 25 09:50:01 2018 -0700
Committer: Michael Stack 
Committed: Tue Sep 25 10:07:32 2018 -0700

--
 .../org/apache/hadoop/hbase/client/Admin.java   |  6 +++
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  5 ++-
 hbase-shell/src/main/ruby/hbase/admin.rb|  9 
 hbase-shell/src/main/ruby/shell.rb  |  1 -
 .../main/ruby/shell/commands/abort_procedure.rb | 47 
 5 files changed, 10 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0d008b47/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 331f2d1..2ea6987 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1723,11 +1723,14 @@ public interface Admin extends Abortable, Closeable {
 
   /**
* Abort a procedure.
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure 
already completed or does not exist
* @throws IOException
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   boolean abortProcedure(
   long procId,
   boolean mayInterruptIfRunning) throws IOException;
@@ -1738,12 +1741,15 @@ public interface Admin extends Abortable, Closeable {
* It may throw ExecutionException if there was an error while executing the 
operation
* or TimeoutException in case the wait timeout was not long enough to allow 
the
* operation to complete.
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
*
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure 
already completed or does not exist
* @throws IOException
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   Future abortProcedureAsync(
 long procId,
 boolean mayInterruptIfRunning) throws IOException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0d008b47/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 8141e74..134ef85 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -846,12 +846,15 @@ public interface AsyncAdmin {
   Map props);
 
   /**
-   * abort a procedure
+   * Abort a procedure
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure already completed or does not 
exist. the value is
* wrapped by {@link CompletableFuture}
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   CompletableFuture abortProcedure(long procId, boolean 
mayInterruptIfRunning);
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/0d008b47/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 4385acf..68bb353 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -1211,15 +1211,6 @@ module Hbase
   @admin.getSecurityCapabilities
 end
 
-# Abort a procedure
-def abort_procedure?(proc_id, may_interrupt_if_running = nil)
-  if may_inte

hbase git commit: HBASE-21223 [amv2] Remove abort_procedure from shell

2018-09-25 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 fa2888ebf -> 2836f7562


HBASE-21223 [amv2] Remove abort_procedure from shell

Signed-off-by: Balazs Meszaros 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2836f756
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2836f756
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2836f756

Branch: refs/heads/branch-2
Commit: 2836f756250a2c61d35ce03197222da29d5b8181
Parents: fa2888e
Author: Michael Stack 
Authored: Tue Sep 25 09:50:01 2018 -0700
Committer: Michael Stack 
Committed: Tue Sep 25 11:28:54 2018 -0700

--
 .../org/apache/hadoop/hbase/client/Admin.java   |  6 +++
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  5 ++-
 hbase-shell/src/main/ruby/hbase/admin.rb|  9 
 hbase-shell/src/main/ruby/shell.rb  |  1 -
 .../main/ruby/shell/commands/abort_procedure.rb | 47 
 5 files changed, 10 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2836f756/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 01ebb66..6096e54 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1736,11 +1736,14 @@ public interface Admin extends Abortable, Closeable {
 
   /**
* Abort a procedure.
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure 
already completed or does not exist
* @throws IOException
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   boolean abortProcedure(
   long procId,
   boolean mayInterruptIfRunning) throws IOException;
@@ -1751,12 +1754,15 @@ public interface Admin extends Abortable, Closeable {
* It may throw ExecutionException if there was an error while executing the 
operation
* or TimeoutException in case the wait timeout was not long enough to allow 
the
* operation to complete.
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
*
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure 
already completed or does not exist
* @throws IOException
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   Future abortProcedureAsync(
 long procId,
 boolean mayInterruptIfRunning) throws IOException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/2836f756/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 0e47de8..897e97f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -846,12 +846,15 @@ public interface AsyncAdmin {
   Map props);
 
   /**
-   * abort a procedure
+   * Abort a procedure
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure already completed or does not 
exist. the value is
* wrapped by {@link CompletableFuture}
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   CompletableFuture abortProcedure(long procId, boolean 
mayInterruptIfRunning);
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/2836f756/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 2be059d..e4efab0 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -1224,15 +1224,6 @@ module Hbase
   @admin.getSecurityCapabilities
 end
 
-# Abort a procedure
-def abort_procedure?(proc_id, may_interrupt_if_running = nil)
-  if may_interrup

hbase git commit: HBASE-21223 [amv2] Remove abort_procedure from shell

2018-09-25 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master b8134fe5b -> 27369136b


HBASE-21223 [amv2] Remove abort_procedure from shell

Signed-off-by: Balazs Meszaros 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/27369136
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/27369136
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/27369136

Branch: refs/heads/master
Commit: 27369136be331a2378e433c23c49a6534c38f8b8
Parents: b8134fe
Author: Michael Stack 
Authored: Tue Sep 25 09:50:01 2018 -0700
Committer: Michael Stack 
Committed: Tue Sep 25 11:29:15 2018 -0700

--
 .../org/apache/hadoop/hbase/client/Admin.java   |  6 +++
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  5 ++-
 hbase-shell/src/main/ruby/hbase/admin.rb|  9 
 hbase-shell/src/main/ruby/shell.rb  |  1 -
 .../main/ruby/shell/commands/abort_procedure.rb | 47 
 5 files changed, 10 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/27369136/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index a43a0b2..08b44c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1737,11 +1737,14 @@ public interface Admin extends Abortable, Closeable {
 
   /**
* Abort a procedure.
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure 
already completed or does not exist
* @throws IOException
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   boolean abortProcedure(
   long procId,
   boolean mayInterruptIfRunning) throws IOException;
@@ -1752,12 +1755,15 @@ public interface Admin extends Abortable, Closeable {
* It may throw ExecutionException if there was an error while executing the 
operation
* or TimeoutException in case the wait timeout was not long enough to allow 
the
* operation to complete.
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
*
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure 
already completed or does not exist
* @throws IOException
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   Future abortProcedureAsync(
 long procId,
 boolean mayInterruptIfRunning) throws IOException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/27369136/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 739c78a..6bb253a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -877,12 +877,15 @@ public interface AsyncAdmin {
   Map props);
 
   /**
-   * abort a procedure
+   * Abort a procedure
+   * Do not use. Usually it is ignored but if not, it can do more damage than 
good. See hbck2.
* @param procId ID of the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure already completed or does not 
exist. the value is
* wrapped by {@link CompletableFuture}
+   * @deprecated Since 2.1.1 -- to be removed.
*/
+  @Deprecated
   CompletableFuture abortProcedure(long procId, boolean 
mayInterruptIfRunning);
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/27369136/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 2be059d..e4efab0 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -1224,15 +1224,6 @@ module Hbase
   @admin.getSecurityCapabilities
 end
 
-# Abort a procedure
-def abort_procedure?(proc_id, may_interrupt_if_running = nil)
-  if may_interrupt_if

hbase git commit: HBASE-21164 reportForDuty should do backoff rather than retry

2018-09-25 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 0d008b479 -> fea75742b


HBASE-21164 reportForDuty should do backoff rather than retry

Remove unused methods from Sleeper (it's ok, it's @Private).
Remove notion of startTime from Sleeper handling (it is unused).
Allow passing in how long to sleep so it can be maintained externally.
In HRS, use a RetryCounter to calculate backoff sleep time for when
reportForDuty is failing against a struggling Master.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fea75742
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fea75742
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fea75742

Branch: refs/heads/branch-2.1
Commit: fea75742b4b9dc476bb2b83efcb3c6358f46bcc2
Parents: 0d008b4
Author: Mingliang Liu 
Authored: Thu Sep 6 23:01:52 2018 -0700
Committer: Michael Stack 
Committed: Tue Sep 25 11:31:39 2018 -0700

--
 .../org/apache/hadoop/hbase/util/Sleeper.java   | 31 +++
 .../org/apache/hadoop/hbase/master/HMaster.java |  3 +-
 .../hbase/regionserver/HRegionServer.java   | 16 ++--
 .../TestRegionServerReportForDuty.java  | 88 
 4 files changed, 111 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fea75742/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
index 7d4d692..93ef08c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
@@ -50,13 +50,6 @@ public class Sleeper {
   }
 
   /**
-   * Sleep for period.
-   */
-  public void sleep() {
-sleep(System.currentTimeMillis());
-  }
-
-  /**
* If currently asleep, stops sleeping; if not asleep, will skip the next
* sleep cycle.
*/
@@ -68,28 +61,24 @@ public class Sleeper {
   }
 
   /**
-   * Sleep for period adjusted by passed startTime
-   * @param startTime Time some task started previous to now.  Time to sleep
-   * will be docked current time minus passed startTime.
+   * Sleep for period.
*/
-  public void sleep(final long startTime) {
+  public void sleep() {
+sleep(this.period);
+  }
+
+  public void sleep(long sleepTime) {
 if (this.stopper.isStopped()) {
   return;
 }
 long now = System.currentTimeMillis();
-long waitTime = this.period - (now - startTime);
-if (waitTime > this.period) {
-  LOG.warn("Calculated wait time > " + this.period +
-"; setting to this.period: " + System.currentTimeMillis() + ", " +
-startTime);
-  waitTime = this.period;
-}
-while (waitTime > 0) {
+long currentSleepTime = sleepTime;
+while (currentSleepTime > 0) {
   long woke = -1;
   try {
 synchronized (sleepLock) {
   if (triggerWake) break;
-  sleepLock.wait(waitTime);
+  sleepLock.wait(currentSleepTime);
 }
 woke = System.currentTimeMillis();
 long slept = woke - now;
@@ -108,7 +97,7 @@ public class Sleeper {
   }
   // Recalculate waitTime.
   woke = (woke == -1)? System.currentTimeMillis(): woke;
-  waitTime = this.period - (woke - startTime);
+  currentSleepTime = this.period - (woke - now);
 }
 synchronized(sleepLock) {
   triggerWake = false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/fea75742/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index d7e57e8..475b0ca 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2923,7 +2923,8 @@ public class HMaster extends HRegionServer implements 
MasterServices {
 }
   }
 
-  void checkServiceStarted() throws ServerNotRunningYetException {
+  @VisibleForTesting
+  protected void checkServiceStarted() throws ServerNotRunningYetException {
 if (!serviceStarted) {
   throw new ServerNotRunningYetException("Server is not running yet");
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/fea75742/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbas

hbase git commit: HBASE-21164 reportForDuty should do backoff rather than retry

2018-09-25 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 27369136b -> 08c4d70aa


HBASE-21164 reportForDuty should do backoff rather than retry

Remove unused methods from Sleeper (it's ok, it's @Private).
Remove notion of startTime from Sleeper handling (it is unused).
Allow passing in how long to sleep so it can be maintained externally.
In HRS, use a RetryCounter to calculate backoff sleep time for when
reportForDuty is failing against a struggling Master.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/08c4d70a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/08c4d70a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/08c4d70a

Branch: refs/heads/master
Commit: 08c4d70aaf0079fa1d0d74d5874f0f2639474138
Parents: 2736913
Author: Mingliang Liu 
Authored: Thu Sep 6 23:01:52 2018 -0700
Committer: Michael Stack 
Committed: Tue Sep 25 11:32:28 2018 -0700

--
 .../org/apache/hadoop/hbase/util/Sleeper.java   | 31 +++
 .../org/apache/hadoop/hbase/master/HMaster.java |  3 +-
 .../hbase/regionserver/HRegionServer.java   | 16 ++--
 .../TestRegionServerReportForDuty.java  | 88 
 4 files changed, 111 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/08c4d70a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
index 7d4d692..93ef08c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
@@ -50,13 +50,6 @@ public class Sleeper {
   }
 
   /**
-   * Sleep for period.
-   */
-  public void sleep() {
-sleep(System.currentTimeMillis());
-  }
-
-  /**
* If currently asleep, stops sleeping; if not asleep, will skip the next
* sleep cycle.
*/
@@ -68,28 +61,24 @@ public class Sleeper {
   }
 
   /**
-   * Sleep for period adjusted by passed startTime
-   * @param startTime Time some task started previous to now.  Time to sleep
-   * will be docked current time minus passed startTime.
+   * Sleep for period.
*/
-  public void sleep(final long startTime) {
+  public void sleep() {
+sleep(this.period);
+  }
+
+  public void sleep(long sleepTime) {
 if (this.stopper.isStopped()) {
   return;
 }
 long now = System.currentTimeMillis();
-long waitTime = this.period - (now - startTime);
-if (waitTime > this.period) {
-  LOG.warn("Calculated wait time > " + this.period +
-"; setting to this.period: " + System.currentTimeMillis() + ", " +
-startTime);
-  waitTime = this.period;
-}
-while (waitTime > 0) {
+long currentSleepTime = sleepTime;
+while (currentSleepTime > 0) {
   long woke = -1;
   try {
 synchronized (sleepLock) {
   if (triggerWake) break;
-  sleepLock.wait(waitTime);
+  sleepLock.wait(currentSleepTime);
 }
 woke = System.currentTimeMillis();
 long slept = woke - now;
@@ -108,7 +97,7 @@ public class Sleeper {
   }
   // Recalculate waitTime.
   woke = (woke == -1)? System.currentTimeMillis(): woke;
-  waitTime = this.period - (woke - startTime);
+  currentSleepTime = this.period - (woke - now);
 }
 synchronized(sleepLock) {
   triggerWake = false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/08c4d70a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 1853062..8ae8be3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -3012,7 +3012,8 @@ public class HMaster extends HRegionServer implements 
MasterServices {
 }
   }
 
-  void checkServiceStarted() throws ServerNotRunningYetException {
+  @VisibleForTesting
+  protected void checkServiceStarted() throws ServerNotRunningYetException {
 if (!serviceStarted) {
   throw new ServerNotRunningYetException("Server is not running yet");
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/08c4d70a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/region

hbase git commit: HBASE-21164 reportForDuty should do backoff rather than retry

2018-09-25 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 2836f7562 -> 0f514ab75


HBASE-21164 reportForDuty should do backoff rather than retry

Remove unused methods from Sleeper (it's ok, it's @Private).
Remove notion of startTime from Sleeper handling (it is unused).
Allow passing in how long to sleep so it can be maintained externally.
In HRS, use a RetryCounter to calculate backoff sleep time for when
reportForDuty is failing against a struggling Master.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0f514ab7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0f514ab7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0f514ab7

Branch: refs/heads/branch-2
Commit: 0f514ab75ae3d903bb2229c97ef824bbd53d3ba8
Parents: 2836f75
Author: Mingliang Liu 
Authored: Thu Sep 6 23:01:52 2018 -0700
Committer: Michael Stack 
Committed: Tue Sep 25 11:32:15 2018 -0700

--
 .../org/apache/hadoop/hbase/util/Sleeper.java   | 31 +++
 .../org/apache/hadoop/hbase/master/HMaster.java |  3 +-
 .../hbase/regionserver/HRegionServer.java   | 16 ++--
 .../TestRegionServerReportForDuty.java  | 88 
 4 files changed, 111 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0f514ab7/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
index 7d4d692..93ef08c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
@@ -50,13 +50,6 @@ public class Sleeper {
   }
 
   /**
-   * Sleep for period.
-   */
-  public void sleep() {
-sleep(System.currentTimeMillis());
-  }
-
-  /**
* If currently asleep, stops sleeping; if not asleep, will skip the next
* sleep cycle.
*/
@@ -68,28 +61,24 @@ public class Sleeper {
   }
 
   /**
-   * Sleep for period adjusted by passed startTime
-   * @param startTime Time some task started previous to now.  Time to sleep
-   * will be docked current time minus passed startTime.
+   * Sleep for period.
*/
-  public void sleep(final long startTime) {
+  public void sleep() {
+sleep(this.period);
+  }
+
+  public void sleep(long sleepTime) {
 if (this.stopper.isStopped()) {
   return;
 }
 long now = System.currentTimeMillis();
-long waitTime = this.period - (now - startTime);
-if (waitTime > this.period) {
-  LOG.warn("Calculated wait time > " + this.period +
-"; setting to this.period: " + System.currentTimeMillis() + ", " +
-startTime);
-  waitTime = this.period;
-}
-while (waitTime > 0) {
+long currentSleepTime = sleepTime;
+while (currentSleepTime > 0) {
   long woke = -1;
   try {
 synchronized (sleepLock) {
   if (triggerWake) break;
-  sleepLock.wait(waitTime);
+  sleepLock.wait(currentSleepTime);
 }
 woke = System.currentTimeMillis();
 long slept = woke - now;
@@ -108,7 +97,7 @@ public class Sleeper {
   }
   // Recalculate waitTime.
   woke = (woke == -1)? System.currentTimeMillis(): woke;
-  waitTime = this.period - (woke - startTime);
+  currentSleepTime = this.period - (woke - now);
 }
 synchronized(sleepLock) {
   triggerWake = false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0f514ab7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 50c7b80..0678bfe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2998,7 +2998,8 @@ public class HMaster extends HRegionServer implements 
MasterServices {
 }
   }
 
-  void checkServiceStarted() throws ServerNotRunningYetException {
+  @VisibleForTesting
+  protected void checkServiceStarted() throws ServerNotRunningYetException {
 if (!serviceStarted) {
   throw new ServerNotRunningYetException("Server is not running yet");
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0f514ab7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/re

hbase git commit: HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

2018-09-25 Thread reidchan
Repository: hbase
Updated Branches:
  refs/heads/master 08c4d70aa -> 0e173d38b


HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0e173d38
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0e173d38
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0e173d38

Branch: refs/heads/master
Commit: 0e173d38b05363e1fb5c85955a4964f05958c1fc
Parents: 08c4d70
Author: Zach York 
Authored: Wed Jun 27 16:18:53 2018 -0700
Committer: Reid Chan 
Committed: Wed Sep 26 11:20:57 2018 +0800

--
 .../apache/hadoop/hbase/util/CommonFSUtils.java |  28 +++
 .../assignment/MergeTableRegionsProcedure.java  |   8 +-
 .../master/assignment/RegionStateStore.java |   8 +-
 .../assignment/SplitTableRegionProcedure.java   |  10 +-
 .../AbstractStateMachineTableProcedure.java |   7 +-
 .../master/procedure/DisableTableProcedure.java |   6 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 158 +++-
 .../apache/hadoop/hbase/wal/WALSplitter.java| 181 +--
 .../hadoop/hbase/master/AbstractTestDLS.java|   6 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |   8 +-
 .../regionserver/wal/AbstractTestWALReplay.java |   8 +-
 .../hbase/wal/TestReadWriteSeqIdFiles.java  |  18 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.java |   2 +-
 .../apache/hadoop/hbase/wal/TestWALSplit.java   |  50 ++---
 14 files changed, 296 insertions(+), 202 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0e173d38/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
index 8924098..899c633 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
@@ -421,6 +421,34 @@ public abstract class CommonFSUtils {
   }
 
   /**
+   * Returns the WAL region directory based on the given table name and region name
+   * @param conf configuration to determine WALRootDir
+   * @param tableName Table that the region is under
+   * @param encodedRegionName Region name used for creating the final region directory
+   * @return the region directory used to store WALs under the WALRootDir
+   * @throws IOException if there is an exception determining the WALRootDir
+   */
+  public static Path getWALRegionDir(final Configuration conf,
+  final TableName tableName, final String encodedRegionName)
+  throws IOException {
+return new Path(getWALTableDir(conf, tableName),
+encodedRegionName);
+  }
+
+  /**
+   * Returns the Table directory under the WALRootDir for the specified table name
+   * @param conf configuration used to get the WALRootDir
+   * @param tableName Table to get the directory for
+   * @return a path to the WAL table directory for the specified table
+   * @throws IOException if there is an exception determining the WALRootDir
+   */
+  public static Path getWALTableDir(final Configuration conf, final TableName tableName)
+  throws IOException {
+return new Path(new Path(getWALRootDir(conf), tableName.getNamespaceAsString()),
+tableName.getQualifierAsString());
+  }
+
+  /**
* Returns the {@link org.apache.hadoop.fs.Path} object representing the table directory under
* path rootdir
*
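
A rough usage sketch of the new helpers (illustrative only; the hbase.wal.dir value and the
encoded region name below are made up, and a production deployment would typically point
hbase.wal.dir at a separate filesystem such as a dedicated HDFS):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.CommonFSUtils;

public class WalRegionDirExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // With this change, recovered edits live under the WAL root rather than hbase.rootdir.
    conf.set("hbase.wal.dir", "file:/tmp/hbase-wal");

    TableName table = TableName.valueOf("ns", "t1");
    String encodedRegionName = "0123456789abcdef0123456789abcdef"; // example value only

    // Resolves to <hbase.wal.dir>/ns/t1/<encodedRegionName>
    Path regionWalDir = CommonFSUtils.getWALRegionDir(conf, table, encodedRegionName);
    System.out.println(regionWalDir);
  }
}

As the diff shows, getWALTableDir composes <WALRootDir>/<namespace>/<qualifier>, and
getWALRegionDir appends the encoded region name to that.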

http://git-wip-us.apache.org/repos/asf/hbase/blob/0e173d38/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index efeea59..d3b209e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -757,14 +757,16 @@ public class MergeTableRegionsProcedure
   }
 
   private void writeMaxSequenceIdFile(MasterProcedureEnv env) throws IOException {
-FileSystem fs = env.getMasterServices().getMasterFileSystem().getFileSystem();
+FileSystem walFS = env.getMasterServices().getMasterWalManager().getFileSystem();
 long maxSequenceId = -1L;
 for (RegionInfo region : regionsToMerge) {
   maxSequenceId =
-Math.max(maxSequenceId, WALSplitter.getMaxRegionSequenceId(fs, getRegionDir(env, reg
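
The sequence-id marker now travels with the WAL filesystem as well. A hedged sketch of the
helper calls visible in this hunk (illustrative only; it assumes CommonFSUtils.getWALFileSystem
resolves the filesystem backing hbase.wal.dir, and the table and region names are made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.wal.WALSplitter;

public class MaxSeqIdOnWalFsExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem walFS = CommonFSUtils.getWALFileSystem(conf);
    Path regionWalDir = CommonFSUtils.getWALRegionDir(conf, TableName.valueOf("ns", "t1"),
        "0123456789abcdef0123456789abcdef"); // hypothetical encoded region name

    // Record a sequence id under the WAL region dir (0 = no safety bump), then read it back.
    WALSplitter.writeRegionSequenceIdFile(walFS, regionWalDir, 42L, 0);
    long maxSeqId = WALSplitter.getMaxRegionSequenceId(walFS, regionWalDir);
    System.out.println(maxSeqId);
  }
}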

hbase git commit: HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

2018-09-25 Thread reidchan
Repository: hbase
Updated Branches:
  refs/heads/branch-2 0f514ab75 -> 21fafbaf5


HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

Amending-Author: Reid Chan 
Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/21fafbaf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/21fafbaf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/21fafbaf

Branch: refs/heads/branch-2
Commit: 21fafbaf53bd7973520228da2b85d462644be606
Parents: 0f514ab
Author: Zach York 
Authored: Wed Jun 27 16:18:53 2018 -0700
Committer: Reid Chan 
Committed: Wed Sep 26 11:49:21 2018 +0800

--
 .../apache/hadoop/hbase/util/CommonFSUtils.java |  28 +++
 .../assignment/MergeTableRegionsProcedure.java  |   8 +-
 .../master/assignment/RegionStateStore.java |   8 +-
 .../assignment/SplitTableRegionProcedure.java   |  10 +-
 .../AbstractStateMachineTableProcedure.java |   7 +-
 .../master/procedure/DisableTableProcedure.java |   6 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 158 +++-
 .../apache/hadoop/hbase/wal/WALSplitter.java| 185 +--
 .../hadoop/hbase/master/AbstractTestDLS.java|   6 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |   8 +-
 .../regionserver/wal/AbstractTestWALReplay.java |   8 +-
 .../hbase/wal/TestReadWriteSeqIdFiles.java  |  18 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.java |   2 +-
 .../apache/hadoop/hbase/wal/TestWALSplit.java   |  50 ++---
 14 files changed, 297 insertions(+), 205 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/21fafbaf/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
index 8924098..899c633 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
@@ -421,6 +421,34 @@ public abstract class CommonFSUtils {
   }
 
   /**
+   * Returns the WAL region directory based on the given table name and region name
+   * @param conf configuration to determine WALRootDir
+   * @param tableName Table that the region is under
+   * @param encodedRegionName Region name used for creating the final region directory
+   * @return the region directory used to store WALs under the WALRootDir
+   * @throws IOException if there is an exception determining the WALRootDir
+   */
+  public static Path getWALRegionDir(final Configuration conf,
+  final TableName tableName, final String encodedRegionName)
+  throws IOException {
+return new Path(getWALTableDir(conf, tableName),
+encodedRegionName);
+  }
+
+  /**
+   * Returns the Table directory under the WALRootDir for the specified table name
+   * @param conf configuration used to get the WALRootDir
+   * @param tableName Table to get the directory for
+   * @return a path to the WAL table directory for the specified table
+   * @throws IOException if there is an exception determining the WALRootDir
+   */
+  public static Path getWALTableDir(final Configuration conf, final TableName tableName)
+  throws IOException {
+return new Path(new Path(getWALRootDir(conf), tableName.getNamespaceAsString()),
+tableName.getQualifierAsString());
+  }
+
+  /**
* Returns the {@link org.apache.hadoop.fs.Path} object representing the table directory under
* path rootdir
*

http://git-wip-us.apache.org/repos/asf/hbase/blob/21fafbaf/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index efeea59..d3b209e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -757,14 +757,16 @@ public class MergeTableRegionsProcedure
   }
 
   private void writeMaxSequenceIdFile(MasterProcedureEnv env) throws IOException {
-FileSystem fs = env.getMasterServices().getMasterFileSystem().getFileSystem();
+FileSystem walFS = env.getMasterServices().getMasterWalManager().getFileSystem();
 long maxSequenceId = -1L;
 for (RegionInfo region : regionsToMerge) {
   maxSequenceId =
-Math.max(maxSequenceId, WALSplitter.getMaxRegionSeque

hbase git commit: HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

2018-09-25 Thread reidchan
Repository: hbase
Updated Branches:
  refs/heads/branch-1 c6a133452 -> a00adb0b4


HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

Signed-off-by: Andrew Purtell 
Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a00adb0b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a00adb0b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a00adb0b

Branch: refs/heads/branch-1
Commit: a00adb0b45f9411be2002a1d9b0ea20a046d52a3
Parents: c6a1334
Author: Zach York 
Authored: Wed Jun 27 16:18:53 2018 -0700
Committer: Reid Chan 
Committed: Wed Sep 26 11:54:44 2018 +0800

--
 .../hadoop/hbase/regionserver/HRegion.java  | 190 ---
 .../org/apache/hadoop/hbase/util/FSUtils.java   |  27 +++
 .../apache/hadoop/hbase/wal/WALSplitter.java| 166 
 .../hadoop/hbase/regionserver/TestHRegion.java  |   8 +-
 .../hbase/regionserver/TestRecoveredEdits.java  |   2 +-
 .../hbase/regionserver/wal/TestWALReplay.java   |   6 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.java |   2 +-
 .../apache/hadoop/hbase/wal/TestWALSplit.java   |  50 ++---
 8 files changed, 263 insertions(+), 188 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a00adb0b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index b682b50..0605663 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -58,6 +58,7 @@ import java.util.NavigableSet;
 import java.util.RandomAccess;
 import java.util.Set;
 import java.util.TreeMap;
+import java.util.TreeSet;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CompletionService;
 import java.util.concurrent.ConcurrentHashMap;
@@ -326,6 +327,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   private final int rowLockWaitDuration;
   static final int DEFAULT_ROWLOCK_WAIT_DURATION = 3;
 
+  private Path regionDir;
+  private FileSystem walFS;
+
   // The internal wait duration to acquire a lock before read/update
   // from the region. It is not per row. The purpose of this wait time
   // is to avoid waiting a long time while the region is busy, so that
@@ -921,7 +925,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 if (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
   // Recover any edits if available.
   maxSeqId = Math.max(maxSeqId,
-replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
+replayRecoveredEditsIfAny(maxSeqIdInStores, reporter, status));
   // Make sure mvcc is up to max.
   this.mvcc.advanceTo(maxSeqId);
 }
@@ -965,8 +969,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 // is opened before recovery completes. So we add a safety bumper to avoid new sequence number
 // overlaps used sequence numbers
 if (this.writestate.writesEnabled) {
-  nextSeqid = WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs
-  .getRegionDir(), nextSeqid, (this.recovering ? (this.flushPerChanges + 1000) : 1));
+  nextSeqid = WALSplitter.writeRegionSequenceIdFile(getWalFileSystem(), getWALRegionDir(),
+  nextSeqid, (this.recovering ? (this.flushPerChanges + 1000) : 1));
 } else {
   nextSeqid++;
 }
@@ -1104,11 +1108,11 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   getRegionServerServices().getServerName(), storeFiles);
 WALUtil.writeRegionEventMarker(wal, getTableDesc(), getRegionInfo(), regionEventDesc, mvcc);
 
-// Store SeqId in HDFS when a region closes
+// Store SeqId in WAL FileSystem when a region closes
 // checking region folder exists is due to many tests which delete the table folder while a
 // table is still online
-if (this.fs.getFileSystem().exists(this.fs.getRegionDir())) {
-  WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs.getRegionDir(),
+if (getWalFileSystem().exists(getWALRegionDir())) {
+  WALSplitter.writeRegionSequenceIdFile(getWalFileSystem(), getWALRegionDir(),
 mvcc.getReadPoint(), 0);
 }
   }
@@ -1797,6 +1801,32 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 return this.fs;
   }
 
+  /** @return the WAL {@link HRegionFileSystem} used by this region */
+  HReg

hbase git commit: HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

2018-09-25 Thread reidchan
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 4be1bf649 -> ca269d346


HBASE-20734 Colocate recovered edits directory with hbase.wal.dir

Signed-off-by: Andrew Purtell 
Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca269d34
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca269d34
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca269d34

Branch: refs/heads/branch-1.4
Commit: ca269d3460c284c3ddbe8380ebe89ad8f4e39ccc
Parents: 4be1bf6
Author: Zach York 
Authored: Wed Jun 27 16:18:53 2018 -0700
Committer: Reid Chan 
Committed: Wed Sep 26 11:58:23 2018 +0800

--
 .../hadoop/hbase/regionserver/HRegion.java  | 190 ---
 .../org/apache/hadoop/hbase/util/FSUtils.java   |  27 +++
 .../apache/hadoop/hbase/wal/WALSplitter.java| 166 
 .../hadoop/hbase/regionserver/TestHRegion.java  |   8 +-
 .../hbase/regionserver/TestRecoveredEdits.java  |   2 +-
 .../hbase/regionserver/wal/TestWALReplay.java   |   6 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.java |   2 +-
 .../apache/hadoop/hbase/wal/TestWALSplit.java   |  50 ++---
 8 files changed, 263 insertions(+), 188 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca269d34/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 705a4bd..602793a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -57,6 +57,7 @@ import java.util.NavigableSet;
 import java.util.RandomAccess;
 import java.util.Set;
 import java.util.TreeMap;
+import java.util.TreeSet;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CompletionService;
 import java.util.concurrent.ConcurrentHashMap;
@@ -322,6 +323,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   private final int rowLockWaitDuration;
   static final int DEFAULT_ROWLOCK_WAIT_DURATION = 3;
 
+  private Path regionDir;
+  private FileSystem walFS;
+
   // The internal wait duration to acquire a lock before read/update
   // from the region. It is not per row. The purpose of this wait time
   // is to avoid waiting a long time while the region is busy, so that
@@ -910,7 +914,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 if (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
   // Recover any edits if available.
   maxSeqId = Math.max(maxSeqId,
-replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
+replayRecoveredEditsIfAny(maxSeqIdInStores, reporter, status));
   // Make sure mvcc is up to max.
   this.mvcc.advanceTo(maxSeqId);
 }
@@ -954,8 +958,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 // is opened before recovery completes. So we add a safety bumper to avoid new sequence number
 // overlaps used sequence numbers
 if (this.writestate.writesEnabled) {
-  nextSeqid = WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs
-  .getRegionDir(), nextSeqid, (this.recovering ? (this.flushPerChanges + 1000) : 1));
+  nextSeqid = WALSplitter.writeRegionSequenceIdFile(getWalFileSystem(), getWALRegionDir(),
+  nextSeqid, (this.recovering ? (this.flushPerChanges + 1000) : 1));
 } else {
   nextSeqid++;
 }
@@ -1093,11 +1097,11 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   getRegionServerServices().getServerName(), storeFiles);
 WALUtil.writeRegionEventMarker(wal, getTableDesc(), getRegionInfo(), regionEventDesc, mvcc);
 
-// Store SeqId in HDFS when a region closes
+// Store SeqId in WAL FileSystem when a region closes
 // checking region folder exists is due to many tests which delete the table folder while a
 // table is still online
-if (this.fs.getFileSystem().exists(this.fs.getRegionDir())) {
-  WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs.getRegionDir(),
+if (getWalFileSystem().exists(getWALRegionDir())) {
+  WALSplitter.writeRegionSequenceIdFile(getWalFileSystem(), getWALRegionDir(),
 mvcc.getReadPoint(), 0);
 }
   }
@@ -1786,6 +1790,32 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 return this.fs;
   }
 
+  /** @return the WAL {@link HRegionFileSystem} used by this region */
+