[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/efd0601e/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html 
b/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
index 67c6092..0a673e2 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -1816,7 +1816,7 @@ public int 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/efd0601e/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.UnmodifyableTableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.UnmodifyableTableDescriptor.html
 
b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.UnmodifyableTableDescriptor.html
new file mode 100644
index 000..78215d6
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.UnmodifyableTableDescriptor.html
@@ -0,0 +1,440 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+ImmutableHTableDescriptor.UnmodifyableTableDescriptor (Apache HBase 
2.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.client
+Class ImmutableHTableDescriptor.UnmodifyableTableDescriptor
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
+
+
+org.apache.hadoop.hbase.client.ImmutableHTableDescriptor.UnmodifyableTableDescriptor
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable,
 TableDescriptor
+
+
+Enclosing class:
+ImmutableHTableDescriptor
+
+
+Deprecated.
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+private static class ImmutableHTableDescriptor.UnmodifyableTableDescriptor
+extends TableDescriptorBuilder.ModifyableTableDescriptor
+
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+UnmodifyableTableDescriptor(TableDescriptor desc)
+Deprecated. 
+ 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Instance Methods Concrete Methods Deprecated Methods 
+
+Modifier and Type
+Method and Description
+
+
+void
+remove(Bytes key)
+Deprecated. 
+Remove metadata represented by the key from the TableDescriptorBuilder.ModifyableTableDescriptor.values
 map
+
+
+
+void
+removeConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key)
+Deprecated. 
+Remove a config setting represented by the key from the
+ TableDescriptorBuilder.ModifyableTableDescriptor.configuration
 map
+
+
+
+HColumnDescriptor
+removeFamily(byte[] column)
+Deprecated. 
+Removes the HColumnDescriptor with name specified by the 
parameter column
+ from the table descriptor
+
+
+
+TableDescriptorBuilder.ModifyableTableDescriptor
+setConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key,
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String value)
+Deprecated. 
+Setter for storing a configuration setting in Ta

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/10601a30/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index be839b7..72853dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -45,1639 +45,1784 @@
 037
 038import 
com.google.common.annotations.VisibleForTesting;
 039
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.hbase.HColumnDescriptor;
-043import 
org.apache.hadoop.hbase.HRegionInfo;
-044import 
org.apache.hadoop.hbase.HRegionLocation;
-045import 
org.apache.hadoop.hbase.HTableDescriptor;
-046import 
org.apache.hadoop.hbase.MetaTableAccessor;
-047import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-048import 
org.apache.hadoop.hbase.NotServingRegionException;
-049import 
org.apache.hadoop.hbase.RegionLocations;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.TableName;
-054import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-055import 
org.apache.hadoop.hbase.TableNotFoundException;
-056import 
org.apache.hadoop.hbase.UnknownRegionException;
-057import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-058import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-059import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-060import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-061import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-062import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-063import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-064import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-065import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-066import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-067import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-068import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-069import 
org.apache.hadoop.hbase.replication.ReplicationException;
-070import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-071import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-072import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generat

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
index b798d4b..8c56a67 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
@@ -3877,425 +3877,371 @@
 3869  throw new ReplicationException("tableCfs is null");
 3870}
 3871ReplicationPeerConfig peerConfig = getReplicationPeerConfig(id);
-3872Map<TableName, List<String>> preTableCfs = peerConfig.getTableCFsMap();
-3873if (preTableCfs == null) {
-3874  peerConfig.setTableCFsMap(tableCfs);
-3875} else {
-3876  for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-3877TableName table = entry.getKey();
-3878Collection<String> appendCfs = entry.getValue();
-3879if (preTableCfs.containsKey(table)) {
-3880  List<String> cfs = preTableCfs.get(table);
-3881  if (cfs == null || appendCfs == null || appendCfs.isEmpty()) {
-3882preTableCfs.put(table, null);
-3883  } else {
-3884Set<String> cfSet = new HashSet<>(cfs);
-3885cfSet.addAll(appendCfs);
-3886preTableCfs.put(table, Lists.newArrayList(cfSet));
-3887  }
-3888} else {
-3889  if (appendCfs == null || appendCfs.isEmpty()) {
-3890preTableCfs.put(table, null);
-3891  } else {
-3892preTableCfs.put(table, Lists.newArrayList(appendCfs));
-3893  }
-3894}
-3895  }
-3896}
-3897updateReplicationPeerConfig(id, peerConfig);
-3898  }
-3899
-3900  @Override
-3901  public void removeReplicationPeerTableCFs(String id,
-3902  Map<TableName, ? extends Collection<String>> tableCfs) throws ReplicationException,
-3903  IOException {
-3904if (tableCfs == null) {
-3905  throw new ReplicationException("tableCfs is null");
-3906}
-3907ReplicationPeerConfig peerConfig = getReplicationPeerConfig(id);
-3908Map<TableName, List<String>> preTableCfs = peerConfig.getTableCFsMap();
-3909if (preTableCfs == null) {
-3910  throw new ReplicationException("Table-Cfs for peer" + id + " is null");
-3911}
-3912for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-3913
-3914  TableName table = entry.getKey();
-3915  Collection<String> removeCfs = entry.getValue();
-3916  if (preTableCfs.containsKey(table)) {
-3917List<String> cfs = preTableCfs.get(table);
-3918if (cfs == null && (removeCfs == null || removeCfs.isEmpty())) {
-3919  preTableCfs.remove(table);
-3920} else if (cfs != null && (removeCfs != null && !removeCfs.isEmpty())) {
-3921  Set<String> cfSet = new HashSet<>(cfs);
-3922  cfSet.removeAll(removeCfs);
-3923  if (cfSet.isEmpty()) {
-3924preTableCfs.remove(table);
-3925  } else {
-3926preTableCfs.put(table, Lists.newArrayList(cfSet));
-3927  }
-3928} else if (cfs == null && (removeCfs != null && !removeCfs.isEmpty())) {
-3929  throw new ReplicationException("Cannot remove cf of table: " + table
-3930  + " which doesn't specify cfs from table-cfs config in peer: " + id);
-3931} else if (cfs != null && (removeCfs == null || removeCfs.isEmpty())) {
-3932  throw new ReplicationException("Cannot remove table: " + table
-3933  + " which has specified cfs from table-cfs config in peer: " + id);
-3934}
-3935  } else {
-3936throw new ReplicationException("No table: " + table + " in table-cfs config of peer: " + id);
-3937  }
-3938}
-3939updateReplicationPeerConfig(id, peerConfig);
-3940  }
-3941
-3942  @Override
-3943  public List<ReplicationPeerDescription> listReplicationPeers() throws IOException {
-3944return listReplicationPeers((Pattern)null);
-3945  }
-3946
-3947  @Override
-3948  public List<ReplicationPeerDescription> listReplicationPeers(String regex) throws IOException {
-3949return listReplicationPeers(Pattern.compile(regex));
-3950  }
-3951
-3952  @Override
-3953  public List<ReplicationPeerDescription> listReplicationPeers(Pattern pattern)
-3954  throws IOException {
-3955return executeCallable(new MasterCallable<List<ReplicationPeerDescription>>(getConnection(),
-3956getRpcControllerFactory()) {
-3957  @Override
-3958  protected List<ReplicationPeerDescription> rpcCall() throws Exception {
-3959List

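The removed HBaseAdmin lines above implement the table-CFs merge and removal semantics for a replication peer: appending unions the per-table column-family sets (a null CF list meaning "all families"), while removing subtracts them and throws if the request does not match the existing config. From client code the same operations go through Admin; a minimal sketch in Java, assuming a running cluster, an Admin handle, and an existing replication peer with id "1" (table and family names are illustrative):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    public class PeerTableCfsExample {
      // Append cf1/cf2 of table t1 to peer "1"'s table-CFs map, then remove them again.
      static void editPeerTableCfs(Admin admin) throws Exception {
        Map<TableName, List<String>> tableCfs = new HashMap<>();
        tableCfs.put(TableName.valueOf("t1"), Arrays.asList("cf1", "cf2"));
        admin.appendReplicationPeerTableCFs("1", tableCfs); // union with existing CF sets
        admin.removeReplicationPeerTableCFs("1", tableCfs); // subtract; throws if not present
      }
    }
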
[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b4bae59/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
index 48674d0..61f3785 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
@@ -411,30 +411,42 @@
 
 
 
+ByteBufferChunkCell
+
+ByteBuffer based cell which has the chunkid at the 0th 
offset
+
+
+
 CellArrayMap
 
 CellArrayMap is a simple array of Cells and cannot be 
allocated off-heap.
 
 
-
+
 CellFlatMap
 
 CellFlatMap stores a constant number of elements and is 
immutable after creation stage.
 
 
-
+
 CellSet
 
+A http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true"; title="class or interface in java.util">Set of Cells, where an add will overwrite the entry if it already exists in the set.
 
 
-
+
 Chunk
 
 A chunk of memory out of which allocations are sliced.
 
 
+
+ChunkCreator
+
+Does the management of memstoreLAB chunk creations.
+
+
 
 CompactedHFilesDischargeHandler
 
@@ -793,12 +805,6 @@
 
 
 
-MemStoreChunkPool
-
-A pool of Chunk 
instances.
-
-
-
 MemStoreCompactor
 
 The ongoing MemStore Compaction manager, dispatches a solo 
running compaction and interrupts
@@ -1018,195 +1024,201 @@
 
 
 
+NoTagByteBufferChunkCell
+
+ByteBuffer based cell which has the chunkid at the 0th 
offset and with no tags
+
+
+
 OffheapChunk
 
 An off heap chunk implementation.
 
 
-
+
 OnheapChunk
 
 An on heap chunk implementation.
 
 
-
+
 OperationStatus
 
 This class stores the Operation status code and the 
exception message
  that occurs in case of failure of operations like put, delete, etc.
 
 
-
+
 RegionCoprocessorHost
 
 Implements the coprocessor environment and runtime support 
for coprocessors
  loaded within a Region.
 
 
-
+
 RegionCoprocessorHost.CoprocessorOperation
  
 
-
+
 RegionCoprocessorHost.EndpointOperation
  
 
-
+
 RegionCoprocessorHost.EndpointOperationWithResult
  
 
-
+
 RegionCoprocessorHost.RegionEnvironment
 
 Encapsulation of the environment of each coprocessor
 
 
-
+
 RegionCoprocessorHost.RegionOperation
  
 
-
+
 RegionCoprocessorHost.RegionOperationWithResult
  
 
-
+
 RegionCoprocessorHost.TableCoprocessorAttribute
  
 
-
+
 RegionServerAccounting
 
 RegionServerAccounting keeps record of some basic real time 
information about
  the Region Server.
 
 
-
+
 RegionServerCoprocessorHost
  
 
-
+
 RegionServerCoprocessorHost.CoprocessOperationWithResult
  
 
-
+
 RegionServerCoprocessorHost.CoprocessorOperation
  
 
-
+
 RegionServerCoprocessorHost.EnvironmentPriorityComparator
 
 Environment priority comparator.
 
 
-
+
 RegionServerCoprocessorHost.RegionServerEnvironment
 
 Coprocessor environment extension providing access to 
region server
  related services.
 
 
-
+
 RegionServerServices.PostOpenDeployContext
 
 Context for postOpenDeployTasks().
 
 
-
+
 RegionServerServices.RegionStateTransitionContext
  
 
-
+
 RegionServicesForStores
 
 Services a Store needs from a Region.
 
 
-
+
 RegionSplitPolicy
 
 A split policy determines when a region should be 
split.
 
 
-
+
 RegionUnassigner
 
+Used to unassign a region when we hit FNFE.
 
 
-
+
 ReversedKeyValueHeap
 
 ReversedKeyValueHeap is used for supporting reversed 
scanning.
 
 
-
+
 ReversedKeyValueHeap.ReversedKVScannerComparator
 
 In ReversedKVScannerComparator, we compare the row of 
scanners' peek values
  first, sort bigger one before the smaller one.
 
 
-
+
 ReversedMobStoreScanner
 
 ReversedMobStoreScanner extends from ReversedStoreScanner, 
and is used to support
  reversed scanning in both the memstore and the MOB store.
 
 
-
+
 ReversedRegionScannerImpl
 
 ReversibleRegionScannerImpl extends from RegionScannerImpl, 
and is used to
  support reversed scanning.
 
 
-
+
 ReversedStoreScanner
 
 ReversedStoreScanner extends from StoreScanner, and is used 
to support
  reversed scanning.
 
 
-
+
 RSDumpServlet
  
 
-
+
 RSRpcServices
 
 Implements the regionserver RPC services.
 
 
-
+
 RSRpcServices.RegionScannerCloseCallBack
 
 An Rpc callback for closing a RegionScanner.
 
 
-
+
 RSRpcServices.RegionScannerHolder
 
 Holder class which holds the RegionScanner, nextCallSeq and 
RpcCallbacks together.
 
 
-
+
 RSRpcServices.RegionScannersCloseCallBack
 
 An RpcCallBack that creates a list of scanners that needs 
to perform callBack operation on
  completion of multiGets.
 
 
-
+
 RSStatusServlet
  
 
-
+
 ScanInfo
 
 Immutable information for scans over a store.
 
 
-
+
 ScannerContext
 
 ScannerContext instances encapsulate limit tracking AND 
progress towards those limits during
@@ -1214,268 +1226,268 @@
  InternalScanner.next(java.util.List).
 
 
-
+
 ScannerContext.Builder
  
 
-
+
 ScannerContext.LimitFields
 
 T

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2fcc2ae0/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
index d2b491b..69c4690 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
@@ -34,27 +34,27 @@
 026
 027{
 028  private final HRegionServer 
regionServer;
-029  private final String filter;
-030  private final String format;
-031  private final String bcv;
-032  private final String bcn;
+029  private final String bcv;
+030  private final String filter;
+031  private final String bcn;
+032  private final String format;
 033  protected static 
org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl.ImplData 
__jamon_setOptionalArguments(org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl.ImplData
 p_implData)
 034  {
-035if(! 
p_implData.getFilter__IsNotDefault())
+035if(! 
p_implData.getBcv__IsNotDefault())
 036{
-037  p_implData.setFilter("general");
+037  p_implData.setBcv("");
 038}
-039if(! 
p_implData.getFormat__IsNotDefault())
+039if(! 
p_implData.getFilter__IsNotDefault())
 040{
-041  p_implData.setFormat("html");
+041  p_implData.setFilter("general");
 042}
-043if(! 
p_implData.getBcv__IsNotDefault())
+043if(! 
p_implData.getBcn__IsNotDefault())
 044{
-045  p_implData.setBcv("");
+045  p_implData.setBcn("");
 046}
-047if(! 
p_implData.getBcn__IsNotDefault())
+047if(! 
p_implData.getFormat__IsNotDefault())
 048{
-049  p_implData.setBcn("");
+049  p_implData.setFormat("html");
 050}
 051return p_implData;
 052  }
@@ -62,10 +62,10 @@
 054  {
 055super(p_templateManager, 
__jamon_setOptionalArguments(p_implData));
 056regionServer = 
p_implData.getRegionServer();
-057filter = p_implData.getFilter();
-058format = p_implData.getFormat();
-059bcv = p_implData.getBcv();
-060bcn = p_implData.getBcn();
+057bcv = p_implData.getBcv();
+058filter = p_implData.getFilter();
+059bcn = p_implData.getBcn();
+060format = p_implData.getFormat();
 061  }
 062  
 063  @Override public void 
renderNoFlush(final java.io.Writer jamonWriter)
@@ -94,8 +94,8 @@
 086  // 41, 3
 087  {
 088
org.apache.hadoop.hbase.tmpl.common.TaskMonitorTmpl __jamon__var_7 = new 
org.apache.hadoop.hbase.tmpl.common.TaskMonitorTmpl(this.getTemplateManager());
-089__jamon__var_7.setFormat("json" 
);
-090
__jamon__var_7.setFilter(filter);
+089
__jamon__var_7.setFilter(filter);
+090__jamon__var_7.setFormat("json" 
);
 091
__jamon__var_7.renderNoFlush(jamonWriter);
 092  }
 093  // 41, 68



[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e4348f53/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
index 31517f6..ac4a9b3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
@@ -64,1374 +64,1421 @@
 056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
 057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
 058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
-101import 

[10/51] [partial] hbase-site git commit: Published site at d7ddc79198679d8c642e7d8ad5141ba518f8d9f3.

2017-04-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3c0cf248/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
index 5d6fc7d..05d364d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
@@ -269,7 +269,7 @@
 
 
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-DisabledWALProvider.DisabledWAL.listeners 
+AbstractFSWALProvider.listeners 
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -277,7 +277,7 @@
 
 
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-AbstractFSWALProvider.listeners 
+DisabledWALProvider.DisabledWAL.listeners 
 
 
 
@@ -334,30 +334,30 @@
 
 
 void
-WALProvider.init(WALFactory factory,
+AbstractFSWALProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String providerId)
-Set up the provider to create wals.
-
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 
 
 
 void
-DisabledWALProvider.init(WALFactory factory,
+RegionGroupingProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 
 
 
 void
-RegionGroupingProvider.init(WALFactory factory,
+WALProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String providerId)
+Set up the provider to create wals.
+
 
 
 void
-AbstractFSWALProvider.init(WALFactory factory,
+DisabledWALProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3c0cf248/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
index 86c5473..098eeec 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
@@ -302,13 +302,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-(package private) WALEdit[]
-HRegion.BatchOperation.walEditsFromCoprocessors 
-
-
 private WALEdit[]
 MiniBatchOperationInProgress.walEditsFromCoprocessors 
 
+
+(package private) WALEdit[]
+HRegion.BatchOperation.walEditsFromCoprocessors 
+
 
 
 
@@ -392,11 +392,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowProcessor.postProcess(HRegion region,
+MultiRowMutationProcessor.postProcess(HRegion region,
WALEdit walEdit,
-   boolean success)
-The hook to be executed after process() and applying the 
Mutations to region.
-
+   boolean success) 
 
 
 void
@@ -406,9 +404,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-MultiRowMutationProcessor.postProcess(HRegion

[10/51] [partial] hbase-site git commit: Published site at d7ddc79198679d8c642e7d8ad5141ba518f8d9f3.

2017-04-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d26ac36c/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
index 05d364d..5d6fc7d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALActionsListener.html
@@ -269,7 +269,7 @@
 
 
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-AbstractFSWALProvider.listeners 
+DisabledWALProvider.DisabledWAL.listeners 
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -277,7 +277,7 @@
 
 
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-DisabledWALProvider.DisabledWAL.listeners 
+AbstractFSWALProvider.listeners 
 
 
 
@@ -334,30 +334,30 @@
 
 
 void
-AbstractFSWALProvider.init(WALFactory factory,
+WALProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String providerId)
+Set up the provider to create wals.
+
 
 
 void
-RegionGroupingProvider.init(WALFactory factory,
+DisabledWALProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 
 
 
 void
-WALProvider.init(WALFactory factory,
+RegionGroupingProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String providerId)
-Set up the provider to create wals.
-
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 
 
 
 void
-DisabledWALProvider.init(WALFactory factory,
+AbstractFSWALProvider.init(WALFactory factory,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List listeners,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String providerId) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d26ac36c/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
index 098eeec..86c5473 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALEdit.html
@@ -302,13 +302,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-private WALEdit[]
-MiniBatchOperationInProgress.walEditsFromCoprocessors 
-
-
 (package private) WALEdit[]
 HRegion.BatchOperation.walEditsFromCoprocessors 
 
+
+private WALEdit[]
+MiniBatchOperationInProgress.walEditsFromCoprocessors 
+
 
 
 
@@ -392,9 +392,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-MultiRowMutationProcessor.postProcess(HRegion region,
+RowProcessor.postProcess(HRegion region,
WALEdit walEdit,
-   boolean success) 
+   boolean success)
+The hook to be executed after process() and applying the 
Mutations to region.
+
 
 
 void
@@ -404,11 +406,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowProcessor.postProcess(HRegion region,
+MultiRo

[10/51] [partial] hbase-site git commit: Published site at 0b5bd78d6e7c51a5c1b6b30a1f385eafcdba8f7b.

2017-04-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2dbdc486/apidocs/org/apache/hadoop/hbase/client/AsyncTableBase.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncTableBase.html 
b/apidocs/org/apache/hadoop/hbase/client/AsyncTableBase.html
new file mode 100644
index 000..cb02aca
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/client/AsyncTableBase.html
@@ -0,0 +1,1107 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+AsyncTableBase (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = 
{"i0":6,"i1":6,"i2":18,"i3":18,"i4":6,"i5":18,"i6":6,"i7":18,"i8":6,"i9":6,"i10":6,"i11":18,"i12":18,"i13":18,"i14":18,"i15":6,"i16":6,"i17":18,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":18,"i27":18,"i28":6,"i29":6,"i30":6,"i31":18,"i32":6};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.client
+Interface 
AsyncTableBase
+
+
+
+
+
+
+All Known Subinterfaces:
+AsyncTable, RawAsyncTable
+
+
+
+@InterfaceAudience.Public
+public interface AsyncTableBase
+The base interface for the asynchronous version of Table. Obtain an instance from an
+ AsyncConnection.
+ 
+ The implementation is required to be thread safe.
+ 
+ Usually the implementation will not throw any exception directly. You need to 
get the exception
+ from the returned http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture.
+
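Since failures surface through the returned CompletableFuture rather than as thrown exceptions, callers attach completion callbacks instead of wrapping calls in try/catch. A minimal sketch in Java, assuming an AsyncTableBase instance has already been obtained from an AsyncConnection (acquisition omitted; the row key is illustrative):

    import org.apache.hadoop.hbase.client.AsyncTableBase;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.util.Bytes;

    class AsyncGetExample {
      // Read one row asynchronously; the error, if any, is delivered
      // through the CompletableFuture rather than thrown by get().
      static void readAsync(AsyncTableBase table) {
        table.get(new Get(Bytes.toBytes("row1")))
            .whenComplete((result, error) -> {
              if (error != null) {
                error.printStackTrace();    // failure path: exception from the future
              } else {
                System.out.println(result); // success path: the fetched Result
              }
            });
      }
    }
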
+
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Instance Methods Abstract Methods Default Methods 
+
+Modifier and Type
+Method and Description
+
+
+CompletableFuture<Result>
+append(Append append)
+Appends values to one or more columns within a single row.
+
+
+
+<T> List<CompletableFuture<T>>
+batch(List<? extends Row> actions)
+Method that does a batch call on Deletes, Gets, Puts, Increments and Appends.
+
+
+
+default <T> CompletableFuture<List<T>>
+batchAll(List<? extends Row> actions)
+A simple version of batch.
+
+
+
+default CompletableFuture<Boolean>
+checkAndDelete(byte[] row,
+  byte[] family,
+  byte[] qualifier,
+  byte[] value,
+  Delete delete)
+Atomically checks if a row/family/qualifier value equals the expected value.
+
+
+
+CompletableFuture<Boolean>
+checkAndDelete(byte[] row,
+  byte[] family,
+  byte[] qualifier,
+  Compar

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e57d1b63/apidocs/org/apache/hadoop/hbase/client/Connection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Connection.html 
b/apidocs/org/apache/hadoop/hbase/client/Connection.html
deleted file mode 100644
index f80fe7d..000
--- a/apidocs/org/apache/hadoop/hbase/client/Connection.html
+++ /dev/null
@@ -1,527 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-Connection (Apache HBase 2.0.0-SNAPSHOT API)
-
-
-
-
-
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev Class
-Next Class
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-Summary: 
-Nested | 
-Field | 
-Constr | 
-Method
-
-
-Detail: 
-Field | 
-Constr | 
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.client
-Interface Connection
-
-
-
-
-
-
-All Superinterfaces:
-org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true";
 title="class or interface in java.lang">AutoCloseable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
-
-
-
-@InterfaceAudience.Public
-public interface Connection
-extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
-A cluster connection encapsulating lower level individual 
connections to actual servers and
- a connection to zookeeper. Connections are instantiated through the ConnectionFactory
- class. The lifecycle of the connection is managed by the caller, who has to 
close()
- the connection to release the resources.
-
-  The connection object contains logic to find the master, locate regions out on the cluster,
- keep a cache of locations, and re-calibrate after they move. The individual
- connections to servers, the meta cache, the zookeeper connection, etc. are all shared
- by the Table and Admin instances obtained from this connection.
-
-  Connection creation is a heavy-weight operation. Connection 
implementations are thread-safe,
- so that the client can create a connection once, and share it with different 
threads.
- Table and 
Admin 
instances, on the other hand, are light-weight and are not
- thread-safe.  Typically, a single connection per client application is 
instantiated and every
- thread will obtain its own Table instance. Caching or pooling of Table and 
Admin
- is not recommended.
-
-Since:
-0.99.0
-See Also:
-ConnectionFactory
-
-
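The recommended pattern described above — one shared, long-lived Connection and short-lived, per-thread Table instances — looks like this in practice (a minimal sketch; the table and row names are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ConnectionExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Heavy-weight and thread-safe: create once and share across threads.
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
          // Light-weight and not thread-safe: obtain per task, close promptly.
          try (Table table = connection.getTable(TableName.valueOf("t1"))) {
            System.out.println(table.get(new Get(Bytes.toBytes("row1"))));
          }
        }
      }
    }
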
-
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All Methods Instance Methods Abstract Methods Default Methods 
-
-Modifier and Type
-Method and Description
-
-
-void
-close() 
-
-
-Admin
-getAdmin()
-Retrieve an Admin implementation to administer an HBase 
cluster.
-
-
-
-BufferedMutator
-getBufferedMutator(BufferedMutatorParams params)
-Retrieve a BufferedMutator for performing 
client-side buffering of writes.
-
-
-
-BufferedMutator
-getBufferedMutator(TableName tableName)
-
- Retrieve a BufferedMutator for performing 
client-side buffering of writes.
-
-
-
-org.apache.hadoop.conf.Configuration
-getConfiguration() 
-
-
-RegionLocator
-getRegionLocator(TableName tableName)
-Retrieve a RegionLocator implementation to inspect region 
information on a table.
-
-
-
-default Table
-getTable(TableName tableName)
-Retrieve a Table implementation for accessing a table.
-
-
-
-default Table
-getTable(TableName tableName,
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
-Retrieve a Table implementation for accessing a table.
-
-
-
-TableBuilder
-getTableBuilder(TableName tableName,
-   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
-Returns an TableBuilder for creating Table.
-
-
-
-boolean
-isClosed()
-Returns whether the con

[10/51] [partial] hbase-site git commit: Published site at 59e8b8e2ba4d403d042fe4cc02f8f9f80aad67af.

2017-04-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32aa5813/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index e768cd7..8c57b38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32aa5813/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7f8b4b2..7bebf07 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
@@ -151,15 +151,15 @@
 
 
 private ReplicationQueues
-ReplicationSource.replicationQueues 
+ReplicationSourceManager.replicationQueues 
 
 
 private ReplicationQueues
-Replication.replicationQueues 
+ReplicationSource.replicationQueues 
 
 
 private ReplicationQueues
-ReplicationSourceManager.replicationQueues 
+Replication.replicationQueues 
 
 
 private ReplicationQueues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32aa5813/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
index b3d3829..373d114 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
@@ -147,11 +147,11 @@
 
 
 private ReplicationTracker
-Replication.replicationTracker 
+ReplicationSourceManager.replicationTracker 
 
 
 private ReplicationTracker
-ReplicationSourceManager.replicationTracker 
+Replication.replicationTracker 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32aa5813/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
index d70f824..ffad47a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
@@ -185,14 +185,14 @@
 
 
 WALEntryFilter
-BaseReplicationEndpoint.getWALEntryfilter()
-Returns a default set of filters
+ReplicationEndpoint.getWALEntryfilter()
+Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
 
 WALEntryFilter
-ReplicationEndpoint.getWALEntryfilter()
-Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
+BaseReplicationEndpoint.getWALEntryfilter()
+Returns a default set of filters
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32aa5813/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
index 4c2ce9f..9cffb66 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
@@ -104,11 +104,11 @@
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationGlobalSourceSource.rms 
+MetricsReplicationSourceSourceImpl.rms 
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationSourceSourceImpl.rms 
+MetricsReplicationGlobalSourceSource.rms 
 
 
 (package private) MetricsReplicationSourceImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32aa5813/devapidocs/org/apache/hadoop/hbase/replication/regionserver

[10/51] [partial] hbase-site git commit: Published site at 59e8b8e2ba4d403d042fe4cc02f8f9f80aad67af.

2017-04-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2af56d9/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying Us

[10/51] [partial] hbase-site git commit: Published site at 59e8b8e2ba4d403d042fe4cc02f8f9f80aad67af.

2017-04-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/58d4ae69/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.
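For context on the getRequestUser() rows this hunk reorders: all three answer the same server-side question, namely which authenticated caller issued the current RPC. A hedged sketch of typical usage (the fallback to the process user is an illustrative choice, not part of the API contract):

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.ipc.RpcServer;
  import org.apache.hadoop.hbase.security.User;
  import org.apache.hadoop.hbase.security.UserProvider;

  public class RequestUserExample {
    // The RPC caller if we are inside a call; otherwise the process user
    // (e.g. for internal housekeeping threads).
    static User effectiveUser(Configuration conf) throws IOException {
      User caller = RpcServer.getRequestUser();  // null outside an RPC context
      return caller != null ? caller : UserProvider.instantiate(conf).getCurrent();
    }
  }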

[10/51] [partial] hbase-site git commit: Published site at 18c5ecf6ed57e80b32568ca1a1a12c7af36bab46.

2017-04-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ab1bfc8/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.
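The createBlockingRpcChannel(...) rows in this hunk are the client-side entry point for synchronous RPC: the returned channel is what a generated blocking protobuf stub talks through. A sketch, assuming a reachable region server and the shaded AdminService stub (variable names are illustrative):

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.ServerName;
  import org.apache.hadoop.hbase.ipc.RpcClient;
  import org.apache.hadoop.hbase.ipc.RpcClientFactory;
  import org.apache.hadoop.hbase.security.User;
  import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;

  public class BlockingChannelExample {
    static AdminProtos.AdminService.BlockingInterface adminStub(
        Configuration conf, String clusterId, ServerName sn) throws IOException {
      RpcClient client = RpcClientFactory.createClient(conf, clusterId);
      // 60s operation timeout; the channel feeds a generated blocking stub.
      BlockingRpcChannel channel =
          client.createBlockingRpcChannel(sn, User.getCurrent(), 60000);
      return AdminProtos.AdminService.newBlockingStub(channel);
    }
  }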

[10/51] [partial] hbase-site git commit: Published site at 18c5ecf6ed57e80b32568ca1a1a12c7af36bab46.

2017-04-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dd6bba64/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index e768cd7..8c57b38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dd6bba64/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7f8b4b2..7bebf07 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
@@ -151,15 +151,15 @@
 
 
 private ReplicationQueues
-ReplicationSource.replicationQueues 
+ReplicationSourceManager.replicationQueues 
 
 
 private ReplicationQueues
-Replication.replicationQueues 
+ReplicationSource.replicationQueues 
 
 
 private ReplicationQueues
-ReplicationSourceManager.replicationQueues 
+Replication.replicationQueues 
 
 
 private ReplicationQueues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dd6bba64/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
index b3d3829..373d114 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
@@ -147,11 +147,11 @@
 
 
 private ReplicationTracker
-Replication.replicationTracker 
+ReplicationSourceManager.replicationTracker 
 
 
 private ReplicationTracker
-ReplicationSourceManager.replicationTracker 
+Replication.replicationTracker 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dd6bba64/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
index d70f824..ffad47a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
@@ -185,14 +185,14 @@
 
 
 WALEntryFilter
-BaseReplicationEndpoint.getWALEntryfilter()
-Returns a default set of filters
+ReplicationEndpoint.getWALEntryfilter()
+Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
 
 WALEntryFilter
-ReplicationEndpoint.getWALEntryfilter()
-Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
+BaseReplicationEndpoint.getWALEntryfilter()
+Returns a default set of filters
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dd6bba64/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
index 4c2ce9f..9cffb66 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
@@ -104,11 +104,11 @@
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationGlobalSourceSource.rms 
+MetricsReplicationSourceSourceImpl.rms 
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationSourceSourceImpl.rms 
+MetricsReplicationGlobalSourceSource.rms 
 
 
 (package private) MetricsReplicationSourceImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dd6bba64/devapidocs/org/apache/hadoop/hbase/replication/regionserver
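The getWALEntryfilter() rows above contrast the interface contract (ReplicationEndpoint) with the default filter chain that BaseReplicationEndpoint assembles. A sketch of a custom endpoint that keeps the default chain and appends one more filter; the pass-through lambda is a placeholder, and returning null from a filter drops the entry:

  import org.apache.hadoop.hbase.replication.ChainWALEntryFilter;
  import org.apache.hadoop.hbase.replication.WALEntryFilter;
  import org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;

  public class FilteringEndpoint extends HBaseInterClusterReplicationEndpoint {
    @Override
    public WALEntryFilter getWALEntryfilter() {
      // Keep the default scope/table-cf filters, then apply our own.
      return new ChainWALEntryFilter(super.getWALEntryfilter(),
          entry -> entry);  // placeholder: passes every entry through unchanged
    }
  }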

[10/51] [partial] hbase-site git commit: Published site at 1a701ce44484f45a8a07ea9826b84f0df6f1518e.

2017-04-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/05bdfb4a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.
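The getPriority(...) rows shuffled here belong to the PriorityFunction contract that RSRpcServices fulfills through AnnotationReadingPriorityFunction. A minimal sketch of the contract itself (not the shipped implementation), using an illustrative method-name rule:

  import org.apache.hadoop.hbase.HConstants;
  import org.apache.hadoop.hbase.ipc.PriorityFunction;
  import org.apache.hadoop.hbase.security.User;
  import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;

  public class MethodNamePriority implements PriorityFunction {
    @Override
    public int getPriority(RequestHeader header, Message param, User user) {
      // Illustrative rule only: favor region-assignment traffic.
      return header.getMethodName().startsWith("OpenRegion")
          ? HConstants.ADMIN_QOS : HConstants.NORMAL_QOS;
    }

    @Override
    public long getDeadline(RequestHeader header, Message param) {
      return 0;  // no deadline-based reordering in this sketch
    }
  }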

[10/51] [partial] hbase-site git commit: Published site at 1a701ce44484f45a8a07ea9826b84f0df6f1518e.

2017-04-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27774cab/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.
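The compact(ThroughputController, User) rows above are the per-store-engine entry points into an actual compaction. A sketch of the surrounding flow under stated assumptions (an already-built StoreEngine and a pre-gathered candidate list; the select(...) flags are isUserCompaction, mayUseOffPeak, forceMajor):

  import java.io.IOException;
  import java.util.List;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.regionserver.StoreEngine;
  import org.apache.hadoop.hbase.regionserver.StoreFile;
  import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
  import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
  import org.apache.hadoop.hbase.security.User;

  public class CompactOnce {
    static List<Path> compactOnce(StoreEngine<?, ?, ?, ?> engine,
        List<StoreFile> candidates, ThroughputController controller,
        User user) throws IOException {
      CompactionContext ctx = engine.createCompaction();
      ctx.select(candidates, false, false, false);
      return ctx.compact(controller, user);
    }
  }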

[10/51] [partial] hbase-site git commit: Published site at ec5188df3090d42088b6f4cb8f0c2fd49425f8c1.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ff2667f4/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.
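For the non-blocking createRpcChannel(...) rows in the hunk above: the returned RpcChannel backs a callback-style stub rather than a blocking one. A fragment under the same assumptions as the blocking sketch earlier ('client' and 'serverName' as before):

  // Types come from the shaded protobuf packages shown in the rows above.
  RpcChannel channel = client.createRpcChannel(serverName, User.getCurrent(), 60000);
  AdminProtos.AdminService.Interface admin = AdminProtos.AdminService.newStub(channel);
  // Methods on 'admin' take an RpcCallback and return immediately.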

[10/51] [partial] hbase-site git commit: Published site at ec5188df3090d42088b6f4cb8f0c2fd49425f8c1.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a3a2f06e/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
index e705265..395ceb7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
@@ -369,14 +369,14 @@
 
 
 ReplicationPeerConfig
-HMaster.getReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String peerId) 
-
-
-ReplicationPeerConfig
 MasterServices.getReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId)
 Returns the configured ReplicationPeerConfig for the 
specified peer
 
 
+
+ReplicationPeerConfig
+HMaster.getReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String peerId) 
+
 
 
 
@@ -388,16 +388,16 @@
 
 
 void
-HMaster.addReplicationPeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
-  ReplicationPeerConfig peerConfig) 
-
-
-void
 MasterServices.addReplicationPeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
   ReplicationPeerConfig peerConfig)
 Add a new replication peer for replicating data to slave 
cluster
 
 
+
+void
+HMaster.addReplicationPeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
+  ReplicationPeerConfig peerConfig) 
+
 
 void
 MasterCoprocessorHost.postAddReplicationPeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
@@ -420,16 +420,16 @@
 
 
 void
-HMaster.updateReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
-   ReplicationPeerConfig peerConfig) 
-
-
-void
 MasterServices.updateReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
ReplicationPeerConfig peerConfig)
 Update the peerConfig for the specified peer
 
 
+
+void
+HMaster.updateReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
+   ReplicationPeerConfig peerConfig) 
+
 
 
 
@@ -510,13 +510,13 @@
 
 
 ReplicationPeerConfig
-ReplicationPeerZKImpl.getPeerConfig()
-Get the peer config object
-
+ReplicationPeerDescription.getPeerConfig() 
 
 
 ReplicationPeerConfig
-ReplicationPeerDescription.getPeerConfig() 
+ReplicationPeerZKImpl.getPeerConfig()
+Get the peer config object
+
 
 
 ReplicationPeerConfig

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a3a2f06e/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index e768cd7..8c57b38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a3a2f06e/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7f8b4b2..7bebf07 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-us
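The addReplicationPeer/updateReplicationPeerConfig rows in the ReplicationPeerConfig hunk above are the master-side half of peer management. A sketch of driving them from a client, assuming the Admin-side mirrors of these master methods and purely illustrative cluster keys:

  // Assumes: org.apache.hadoop.hbase.client.Admin admin is already connected.
  ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
  peerConfig.setClusterKey("zk1,zk2,zk3:2181:/hbase");  // example slave cluster key
  admin.addReplicationPeer("peer_1", peerConfig);
  // Re-point the same peer later:
  peerConfig.setClusterKey("zk4:2181:/hbase");
  admin.updateReplicationPeerConfig("peer_1", peerConfig);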

[10/51] [partial] hbase-site git commit: Published site at ec5188df3090d42088b6f4cb8f0c2fd49425f8c1.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b64f8dea/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.

[10/51] [partial] hbase-site git commit: Published site at ec5188df3090d42088b6f4cb8f0c2fd49425f8c1.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/30d67bee/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
index dfde2ce..363d1de 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
@@ -109,35 +109,25 @@
 
 
 
-Append
-Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Delete
+Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
 
-Mutation
-Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Increment
+Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
 
-Delete
-Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
-  Permission perms) 
-
-
 Scan
 Scan.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
-
+
 Get
 Get.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
-
-Increment
-Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
-  Permission perms) 
-
 
 Put
 Put.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
@@ -148,6 +138,16 @@
 Query.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
+
+Append
+Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+  Permission perms) 
+
+
+Mutation
+Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+  Permission perms) 
+
 
 
 
@@ -158,29 +158,21 @@
 
 
 
-Append
-Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
-
-Mutation
-Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
-
 Delete
 Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
 
+Increment
+Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
+
+
 Scan
 Scan.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
-
+
 Get
 Get.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
-
-Increment
-Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
 
 Put
 Put.setACL(http://docs.oracle.com/javase/
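Both setACL overloads reordered in this hunk attach cell-level permissions to a single operation: one (user, Permission) pair at a time, or a Map for several users at once. A sketch with illustrative row, column, and user names:

  import java.util.HashMap;
  import java.util.Map;
  import org.apache.hadoop.hbase.client.Put;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.security.access.Permission;
  import org.apache.hadoop.hbase.util.Bytes;

  public class AclExample {
    static void tagAcls() {
      Put put = new Put(Bytes.toBytes("row1"));
      put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      // Single-user form:
      put.setACL("alice", new Permission(Permission.Action.READ));
      // Map form for several users at once:
      Map<String, Permission> acl = new HashMap<>();
      acl.put("bob", new Permission(Permission.Action.READ, Permission.Action.WRITE));
      Scan scan = new Scan();
      scan.setACL(acl);
    }
  }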

[10/51] [partial] hbase-site git commit: Published site at d7e3116a1744057359ca48d94aa50d7fdf0db974.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5647403a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
index dfde2ce..363d1de 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
@@ -109,35 +109,25 @@
 
 
 
-Append
-Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Delete
+Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
 
-Mutation
-Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Increment
+Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
 
-Delete
-Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
-  Permission perms) 
-
-
 Scan
 Scan.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
-
+
 Get
 Get.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
-
-Increment
-Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
-  Permission perms) 
-
 
 Put
 Put.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
@@ -148,6 +138,16 @@
 Query.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
+
+Append
+Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+  Permission perms) 
+
+
+Mutation
+Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+  Permission perms) 
+
 
 
 
@@ -158,29 +158,21 @@
 
 
 
-Append
-Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
-
-Mutation
-Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
-
 Delete
 Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
 
+Increment
+Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
+
+
 Scan
 Scan.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
-
+
 Get
 Get.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
-
-Increment
-Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
 
 Put
 Put.setACL(http://docs.oracle.com/javase/

[10/51] [partial] hbase-site git commit: Published site at d7e3116a1744057359ca48d94aa50d7fdf0db974.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc69ad3c/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.
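The same create(...) pair appears once more; the provider indirection exists so deployments can swap the User implementation via the hbase.client.userprovider.class setting instead of calling the static helper. A sketch of the provider path:

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.security.User;
  import org.apache.hadoop.hbase.security.UserProvider;
  import org.apache.hadoop.security.UserGroupInformation;

  public class ProviderCreateExample {
    public static void main(String[] args) throws IOException {
      Configuration conf = HBaseConfiguration.create();
      // instantiate(...) honors hbase.client.userprovider.class, letting tests
      // or alternative auth schemes substitute their own User factory.
      UserProvider provider = UserProvider.instantiate(conf);
      User user = provider.create(UserGroupInformation.getCurrentUser());
      System.out.println(user.getName());
    }
  }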

[10/51] [partial] hbase-site git commit: Published site at d7e3116a1744057359ca48d94aa50d7fdf0db974.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/992c1502/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
Wraps an underlying UserGroupInformation instance.

[10/51] [partial] hbase-site git commit: Published site at 17737b2710a2a1271eb791478eb99f7a573ecac1.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de74873b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation instance.

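The hunks above only reorder the generated index rows for getRequestUser(); the API itself is unchanged. For orientation, a minimal sketch of how server-side code consumes it at this snapshot (the class name is made up; the null handling follows the javadoc quoted in the hunk):

    import org.apache.hadoop.hbase.ipc.RpcServer;
    import org.apache.hadoop.hbase.security.User;

    public final class WhoIsCalling {
      public static void main(String[] args) {
        // Inside a real RPC handler this carries the caller's credentials;
        // outside of one (as here) it is simply null, per the javadoc above.
        User user = RpcServer.getRequestUser();
        System.out.println(user == null ? "<no credentials>" : user.getShortName());
      }
    }
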
[10/51] [partial] hbase-site git commit: Published site at 17737b2710a2a1271eb791478eb99f7a573ecac1.

2017-04-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f55be5a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index e768cd7..8c57b38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f55be5a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7f8b4b2..7bebf07 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
@@ -151,15 +151,15 @@
 
 
 private ReplicationQueues
-ReplicationSource.replicationQueues 
+ReplicationSourceManager.replicationQueues 
 
 
 private ReplicationQueues
-Replication.replicationQueues 
+ReplicationSource.replicationQueues 
 
 
 private ReplicationQueues
-ReplicationSourceManager.replicationQueues 
+Replication.replicationQueues 
 
 
 private ReplicationQueues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f55be5a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
index b3d3829..373d114 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
@@ -147,11 +147,11 @@
 
 
 private ReplicationTracker
-Replication.replicationTracker 
+ReplicationSourceManager.replicationTracker 
 
 
 private ReplicationTracker
-ReplicationSourceManager.replicationTracker 
+Replication.replicationTracker 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f55be5a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
index d70f824..ffad47a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
@@ -185,14 +185,14 @@
 
 
 WALEntryFilter
-BaseReplicationEndpoint.getWALEntryfilter()
-Returns a default set of filters
+ReplicationEndpoint.getWALEntryfilter()
+Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
 
 WALEntryFilter
-ReplicationEndpoint.getWALEntryfilter()
-Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
+BaseReplicationEndpoint.getWALEntryfilter()
+Returns a default set of filters
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f55be5a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
index 4c2ce9f..9cffb66 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
@@ -104,11 +104,11 @@
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationGlobalSourceSource.rms 
+MetricsReplicationSourceSourceImpl.rms 
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationSourceSourceImpl.rms 
+MetricsReplicationGlobalSourceSource.rms 
 
 
 (package private) MetricsReplicationSourceImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f55be5a/devapidocs/org/apache/hadoop/hbase/replication/regionserver

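The getWALEntryfilter() rows above likewise just trade places in the index. A small sketch of the filter contract they describe, assuming ChainWALEntryFilter is the helper BaseReplicationEndpoint uses to combine its "default set of filters" (the two anonymous filters are purely illustrative):

    import org.apache.hadoop.hbase.replication.ChainWALEntryFilter;
    import org.apache.hadoop.hbase.replication.WALEntryFilter;
    import org.apache.hadoop.hbase.wal.WAL;

    public final class FilterChainSketch {
      public static void main(String[] args) {
        // Pass-through: returning the entry keeps it in the replication stream.
        WALEntryFilter keep = new WALEntryFilter() {
          @Override public WAL.Entry filter(WAL.Entry entry) { return entry; }
        };
        // Returning null filters the entry out of the log-shipping path.
        WALEntryFilter drop = new WALEntryFilter() {
          @Override public WAL.Entry filter(WAL.Entry entry) { return null; }
        };
        // A replication endpoint can combine several rules the same way.
        WALEntryFilter chain = new ChainWALEntryFilter(keep, drop);
        System.out.println("built " + chain);
      }
    }
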
[10/51] [partial] hbase-site git commit: Published site at cbcbcf4dcd3401327cc36173f3ca8e5362da1e0c.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3c5f2ff4/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation instance.

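Both create(UserGroupInformation) entries above wrap a Hadoop UGI in an HBase User; the static User.create and the configuration-driven UserProvider route are equivalent for the default provider. A minimal sketch (the printed names are only for demonstration):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.security.UserProvider;
    import org.apache.hadoop.security.UserGroupInformation;

    public final class WrapUgi {
      public static void main(String[] args) throws IOException {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

        // Static factory on User itself:
        User direct = User.create(ugi);

        // Or via UserProvider, which is pluggable through configuration:
        Configuration conf = HBaseConfiguration.create();
        User provided = UserProvider.instantiate(conf).create(ugi);

        System.out.println(direct.getShortName() + " / " + provided.getShortName());
      }
    }
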
[10/51] [partial] hbase-site git commit: Published site at cbcbcf4dcd3401327cc36173f3ca8e5362da1e0c.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/12ed5f60/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)

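The createBlockingRpcChannel / createRpcChannel rows above bind a channel to a server, a user "ticket", and a timeout. A hedged sketch of driving that API through RpcClientFactory (host name, port, and the 10 s timeout are placeholders; the shaded protobuf package is as quoted in the hunks):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.ipc.RpcClient;
    import org.apache.hadoop.hbase.ipc.RpcClientFactory;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.security.UserProvider;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;

    public final class ChannelSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        User user = UserProvider.instantiate(conf).getCurrent();
        ServerName sn =
            ServerName.valueOf("regionserver.example.com", 16020, System.currentTimeMillis());
        try (RpcClient client = RpcClientFactory.createClient(conf, HConstants.CLUSTER_ID_DEFAULT)) {
          // The channel ties subsequent RPCs to a server, a user, and a timeout.
          BlockingRpcChannel channel = client.createBlockingRpcChannel(sn, user, 10_000);
          // A generated protobuf stub would now be bound via XxxService.newBlockingStub(channel).
        }
      }
    }
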
[10/51] [partial] hbase-site git commit: Published site at cbcbcf4dcd3401327cc36173f3ca8e5362da1e0c.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d5fd4098/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation instance.

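PriorityFunction.getPriority(header, param, user), whose implementations swap index positions in the hunk above, maps a request to a QoS level; AnnotationReadingPriorityFunction derives it from annotations. A toy implementation, under the assumption that a caller named "admin" should jump the queue (the name is hypothetical):

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.ipc.PriorityFunction;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;

    public class AdminFirstPriority implements PriorityFunction {
      @Override
      public int getPriority(RequestHeader header, Message param, User user) {
        if (user != null && "admin".equals(user.getShortName())) {
          return HConstants.ADMIN_QOS; // served by the high-priority handler pool
        }
        return HConstants.NORMAL_QOS;
      }

      @Override
      public long getDeadline(RequestHeader header, Message param) {
        return 0; // no deadline-based reordering in this sketch
      }
    }
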
[10/51] [partial] hbase-site git commit: Published site at cbcbcf4dcd3401327cc36173f3ca8e5362da1e0c.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb4fc1ff/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index e768cd7..8c57b38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb4fc1ff/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7f8b4b2..7bebf07 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
@@ -151,15 +151,15 @@
 
 
 private ReplicationQueues
-ReplicationSource.replicationQueues 
+ReplicationSourceManager.replicationQueues 
 
 
 private ReplicationQueues
-Replication.replicationQueues 
+ReplicationSource.replicationQueues 
 
 
 private ReplicationQueues
-ReplicationSourceManager.replicationQueues 
+Replication.replicationQueues 
 
 
 private ReplicationQueues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb4fc1ff/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
index b3d3829..373d114 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
@@ -147,11 +147,11 @@
 
 
 private ReplicationTracker
-Replication.replicationTracker 
+ReplicationSourceManager.replicationTracker 
 
 
 private ReplicationTracker
-ReplicationSourceManager.replicationTracker 
+Replication.replicationTracker 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb4fc1ff/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
index d70f824..ffad47a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
@@ -185,14 +185,14 @@
 
 
 WALEntryFilter
-BaseReplicationEndpoint.getWALEntryfilter()
-Returns a default set of filters
+ReplicationEndpoint.getWALEntryfilter()
+Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
 
 WALEntryFilter
-ReplicationEndpoint.getWALEntryfilter()
-Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
+BaseReplicationEndpoint.getWALEntryfilter()
+Returns a default set of filters
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb4fc1ff/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
index 4c2ce9f..9cffb66 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
@@ -104,11 +104,11 @@
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationGlobalSourceSource.rms 
+MetricsReplicationSourceSourceImpl.rms 
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationSourceSourceImpl.rms 
+MetricsReplicationGlobalSourceSource.rms 
 
 
 (package private) MetricsReplicationSourceImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb4fc1ff/devapidocs/org/apache/hadoop/hbase/replication/regionserver

[10/51] [partial] hbase-site git commit: Published site at a66d491892514fd4a188d6ca87d6260d8ae46184.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/40c1aa42/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation instance.

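The compact(ThroughputController, User) overloads above thread a User through so the region server can execute the compaction under the caller's identity. The primitive behind that is User.runAs; a minimal sketch with a made-up principal:

    import java.io.IOException;
    import java.security.PrivilegedAction;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.security.UserGroupInformation;

    public final class RunAsSketch {
      public static void main(String[] args) {
        // "alice" is an illustrative principal, not a real account.
        User alice = User.create(UserGroupInformation.createRemoteUser("alice"));
        String whoami = alice.runAs((PrivilegedAction<String>) () -> {
          try {
            // Inside runAs, the current UGI is the wrapped user.
            return UserGroupInformation.getCurrentUser().getShortUserName();
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        });
        System.out.println(whoami); // expected to print "alice"
      }
    }
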
[10/51] [partial] hbase-site git commit: Published site at a66d491892514fd4a188d6ca87d6260d8ae46184.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4595b14d/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)

[10/51] [partial] hbase-site git commit: Published site at a66d491892514fd4a188d6ca87d6260d8ae46184.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e3a86836/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html 
b/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
index 67361c3..44c3d40 100644
--- a/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
+++ b/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
@@ -114,8 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
- @InterfaceStability.Stable
-public class RemoteHTable
+public class RemoteHTable
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements Table
 HTable interface to remote tables accessed via REST 
gateway
@@ -559,7 +558,7 @@ implements 
 
 RemoteHTable
-public RemoteHTable(Client client,
+public RemoteHTable(Client client,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Constructor
 
@@ -570,7 +569,7 @@ implements 
 
 RemoteHTable
-public RemoteHTable(Client client,
+public RemoteHTable(Client client,
 org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Constructor
@@ -582,7 +581,7 @@ implements 
 
 RemoteHTable
-public RemoteHTable(Client client,
+public RemoteHTable(Client client,
 org.apache.hadoop.conf.Configuration conf,
 byte[] name)
 Constructor
@@ -602,7 +601,7 @@ implements 
 
 buildRowSpec
-protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String buildRowSpec(byte[] row,
+protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String buildRowSpec(byte[] row,
   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map familyMap,
   long startTime,
   long endTime,
@@ -615,7 +614,7 @@ implements 
 
 buildMultiRowSpec
-protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String buildMultiRowSpec(byte[][] rows,
+protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String buildMultiRowSpec(byte[][] rows,
int maxVersions)
 
 
@@ -625,7 +624,7 @@ implements 
 
 buildResultFromModel
-protected Result[] buildResultFromModel(org.apache.hadoop.hbase.rest.model.CellSetModel model)
+protected Result[] buildResultFromModel(org.apache.hadoop.hbase.rest.model.CellSetModel model)
 
 
 
@@ -634,7 +633,7 @@ implements 
 
 buildModelFromPut
-protected org.apache.hadoop.hbase.rest.model.CellSetModel buildModelFromPut(Put put)
+protected org.apache.hadoop.hbase.rest.model.CellSetModel buildModelFromPut(Put put)
 
 
 
@@ -643,7 +642,7 @@ implements 
 
 getTableName
-public byte[] getTableName()
+public byte[] getTableName()
 
 
 
@@ -652,7 +651,7 @@ implements 
 
 getName
-public TableName getName()
+public TableName getName()
 Description copied from 
interface: Table
 Gets the fully qualified table name instance of this 
table.
 
@@ -667,7 +666,7 @@ implements 
 
 getConfiguration
-public org.apache.hadoop.conf.Configuration getConfiguration()
+public org.apache.hadoop.conf.Configuration getConfiguration()
 Description copied from 
interface: Table
 Returns the Configuration object used by this 
instance.
  
@@ -685,7 +684,7 @@ implements 
 
 getTableDescriptor
-public HTableDescriptor getTableDescriptor()
+public HTableDescriptor getTableDescriptor()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: Table
 Gets the table descriptor for 
this table.
@@ -703,7 +702,7 @@ implements 
 
 close
-public void close()
+public void close()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: Table
 Releases any resources held or pending changes in internal 
buffers.
@@ -725,7 +724,7 @@ implements 
 
 get
-public Result get(Get get)
+public Result get(Get get)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: Table

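The RemoteHTable hunk above only touches anchors in the generated page; the REST client API is unchanged. A short usage sketch against a hypothetical REST gateway at rest.example.com:8080 (table, family, and qualifier names are placeholders):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.rest.client.Client;
    import org.apache.hadoop.hbase.rest.client.Cluster;
    import org.apache.hadoop.hbase.rest.client.RemoteHTable;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class RestGet {
      public static void main(String[] args) throws IOException {
        Cluster cluster = new Cluster();
        cluster.add("rest.example.com", 8080); // the REST gateway, not a region server
        Client client = new Client(cluster);
        RemoteHTable table = new RemoteHTable(client, "mytable");
        try {
          Result r = table.get(new Get(Bytes.toBytes("row1")));
          byte[] value = r.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"));
          System.out.println(value == null ? "miss" : Bytes.toString(value));
        } finally {
          table.close(); // releases resources held by the wrapper
        }
      }
    }
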
[10/51] [partial] hbase-site git commit: Published site at 910b68082c8f200f0ba6395a76b7ee1c8917e401.

2017-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d957e04/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation instance.

[10/51] [partial] hbase-site git commit: Published site at 910b68082c8f200f0ba6395a76b7ee1c8917e401.

2017-04-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce7e3598/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)

[10/51] [partial] hbase-site git commit: Published site at e916b79db58bb9be806a833b2c0e675f1136c15a.

2017-04-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/292b62a2/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation instance.

[10/51] [partial] hbase-site git commit: Published site at e916b79db58bb9be806a833b2c0e675f1136c15a.

2017-04-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ef110e7/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)

[10/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index 8c57b38..e768cd7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7bebf07..7f8b4b2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
@@ -151,15 +151,15 @@
 
 
 private ReplicationQueues
-ReplicationSourceManager.replicationQueues 
+ReplicationSource.replicationQueues 
 
 
 private ReplicationQueues
-ReplicationSource.replicationQueues 
+Replication.replicationQueues 
 
 
 private ReplicationQueues
-Replication.replicationQueues 
+ReplicationSourceManager.replicationQueues 
 
 
 private ReplicationQueues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
index 373d114..b3d3829 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
@@ -147,11 +147,11 @@
 
 
 private ReplicationTracker
-ReplicationSourceManager.replicationTracker 
+Replication.replicationTracker 
 
 
 private ReplicationTracker
-Replication.replicationTracker 
+ReplicationSourceManager.replicationTracker 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
index ffad47a..d70f824 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
@@ -185,14 +185,14 @@
 
 
 WALEntryFilter
-ReplicationEndpoint.getWALEntryfilter()
-Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
+BaseReplicationEndpoint.getWALEntryfilter()
+Returns a default set of filters
 
 
 
 WALEntryFilter
-BaseReplicationEndpoint.getWALEntryfilter()
-Returns a default set of filters
+ReplicationEndpoint.getWALEntryfilter()
+Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
index 9cffb66..4c2ce9f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
@@ -104,11 +104,11 @@
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationSourceSourceImpl.rms 
+MetricsReplicationGlobalSourceSource.rms 
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationGlobalSourceSource.rms 
+MetricsReplicationSourceSourceImpl.rms 
 
 
 (package private) MetricsReplicationSourceImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/replication/regionserver
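
The getWALEntryfilter rows above contrast the ReplicationEndpoint contract with the default filter chain supplied by BaseReplicationEndpoint. Below is a minimal sketch of the filter contract itself, assuming the documented convention that returning null drops an entry from replication; the table name is a placeholder.

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.replication.WALEntryFilter;
    import org.apache.hadoop.hbase.wal.WAL;

    public class SkipTableWALEntryFilter implements WALEntryFilter {
      // Placeholder table whose edits should not replicate.
      private static final TableName SKIP = TableName.valueOf("t1");

      @Override
      public WAL.Entry filter(WAL.Entry entry) {
        if (SKIP.equals(entry.getKey().getTablename())) {
          return null; // returning null filters the entry out of the stream
        }
        return entry;
      }
    }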

[10/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6509b6fa/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(
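
The last rows above pair the static User.create(UserGroupInformation) factory with UserProvider's instance method of the same name. A short sketch of both routes follows; obtaining the provider through UserProvider.instantiate(conf) is an assumption taken from the same API, not shown in this hunk.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.security.UserProvider;
    import org.apache.hadoop.security.UserGroupInformation;

    public class UserWrapSketch {
      public static void main(String[] args) {
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");
        // Static factory from the table above: wraps the UGI directly.
        User direct = User.create(ugi);
        // Provider route: useful where the provider is injected/configured.
        Configuration conf = HBaseConfiguration.create();
        User viaProvider = UserProvider.instantiate(conf).create(ugi);
        System.out.println(direct.getShortName() + " / " + viaProvider.getShortName());
      }
    }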

[10/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7a34b01b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying Us
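
The getPriority rows above show the PriorityFunction contract beside AnnotationReadingPriorityFunction, which derives the value from annotations on the request type. A hedged sketch of a custom implementation follows; the getDeadline method and the QOS constants come from the same interface and HConstants, assumptions not visible in this hunk.

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.ipc.PriorityFunction;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;

    public class MethodNamePriorityFunction implements PriorityFunction {
      @Override
      public int getPriority(RequestHeader header, Message param, User user) {
        // Promote one method by name; everything else stays at normal QOS.
        return "Scan".equals(header.getMethodName())
            ? HConstants.HIGH_QOS : HConstants.NORMAL_QOS;
      }

      @Override
      public long getDeadline(RequestHeader header, Message param) {
        return 0; // no deadline-based ordering in this sketch
      }
    }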

[10/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index e768cd7..8c57b38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7f8b4b2..7bebf07 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
@@ -151,15 +151,15 @@
 
 
 private ReplicationQueues
-ReplicationSource.replicationQueues 
+ReplicationSourceManager.replicationQueues 
 
 
 private ReplicationQueues
-Replication.replicationQueues 
+ReplicationSource.replicationQueues 
 
 
 private ReplicationQueues
-ReplicationSourceManager.replicationQueues 
+Replication.replicationQueues 
 
 
 private ReplicationQueues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
index b3d3829..373d114 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
@@ -147,11 +147,11 @@
 
 
 private ReplicationTracker
-Replication.replicationTracker 
+ReplicationSourceManager.replicationTracker 
 
 
 private ReplicationTracker
-ReplicationSourceManager.replicationTracker 
+Replication.replicationTracker 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
index d70f824..ffad47a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
@@ -185,14 +185,14 @@
 
 
 WALEntryFilter
-BaseReplicationEndpoint.getWALEntryfilter()
-Returns a default set of filters
+ReplicationEndpoint.getWALEntryfilter()
+Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
 
 WALEntryFilter
-ReplicationEndpoint.getWALEntryfilter()
-Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
+BaseReplicationEndpoint.getWALEntryfilter()
+Returns a default set of filters
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
index 4c2ce9f..9cffb66 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
@@ -104,11 +104,11 @@
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationGlobalSourceSource.rms 
+MetricsReplicationSourceSourceImpl.rms 
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationSourceSourceImpl.rms 
+MetricsReplicationGlobalSourceSource.rms 
 
 
 (package private) MetricsReplicationSourceImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/replication/regionserver

[10/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3a970c89/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying Us

[10/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cd27d06a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index b142fd6..f1a1e53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-static User
-RpcServer.getRequestUser()
+User
+RpcCallContext.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
 
-User
-RpcServer.Call.getRequestUser() 
-
-
-User
-RpcCallContext.getRequestUser()
+static User
+RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
+
+User
+RpcServer.Call.getRequestUser() 
+
 
 User
 ConnectionId.getTicket() 
@@ -439,25 +439,19 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
-User user,
-int rpcTimeout) 
-
 
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
 RpcClient.createRpcChannel(ServerName sn,
 User user,
@@ -465,6 +459,12 @@
 Creates a "channel" that can be used by a protobuf 
service.
 
 
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
+AbstractRpcClient.createRpcChannel(ServerName sn,
+User user,
+int rpcTimeout) 
+
 
 int
 PriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,17 +922,17 @@
 
 
 int
-AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user)
-Returns a 'priority' based on the request type.
-
+   User user) 
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
+   User user)
+Returns a 'priority' based on the request type.
+
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -1366,14 +1366,14 @@
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(
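
The three compact(ThroughputController, User) rows above belong to the compaction contexts of the pluggable store engines (default, date-tiered, stripe). Which engine a store uses is selected per column family; a hedged sketch follows, where the config key is an assumption taken from StoreEngine in the same codebase rather than from this diff.

    import org.apache.hadoop.hbase.HColumnDescriptor;

    public class StoreEngineChoiceSketch {
      public static void main(String[] args) {
        HColumnDescriptor cf = new HColumnDescriptor("d");
        // Route this family's compactions through DateTieredStoreEngine's
        // DateTieredCompactionContext rather than the default engine.
        cf.setConfiguration("hbase.hstore.engine.class",
            "org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine");
      }
    }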

[10/51] [partial] hbase-site git commit: Published site at 1c4d9c8965952cbd17f0afdacbb0c0ac1e5bd1d7.

2017-03-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d6608edf/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
index f1a1e53..b142fd6 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/User.html
@@ -386,23 +386,23 @@
 
 
 
-User
-RpcCallContext.getRequestUser()
-Returns the user credentials associated with the current 
RPC request or
- null if no credentials were provided.
-
-
-
 static User
 RpcServer.getRequestUser()
 Returns the user credentials associated with the current 
RPC request or
  null if no credentials were provided.
 
 
-
+
 User
 RpcServer.Call.getRequestUser() 
 
+
+User
+RpcCallContext.getRequestUser()
+Returns the user credentials associated with the current 
RPC request or
+ null if no credentials were provided.
+
+
 
 User
 ConnectionId.getTicket() 
@@ -439,31 +439,31 @@
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
+AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
+User ticket,
+int rpcTimeout) 
+
+
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
 RpcClient.createBlockingRpcChannel(ServerName sn,
 User user,
 int rpcTimeout)
 Creates a "channel" that can be used by a blocking protobuf 
service.
 
 
-
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel
-AbstractRpcClient.createBlockingRpcChannel(ServerName sn,
-User ticket,
-int rpcTimeout) 
-
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-RpcClient.createRpcChannel(ServerName sn,
+AbstractRpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout)
-Creates a "channel" that can be used by a protobuf 
service.
-
+int rpcTimeout) 
 
 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel
-AbstractRpcClient.createRpcChannel(ServerName sn,
+RpcClient.createRpcChannel(ServerName sn,
 User user,
-int rpcTimeout) 
+int rpcTimeout)
+Creates a "channel" that can be used by a protobuf 
service.
+
 
 
 int
@@ -891,17 +891,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DefaultStoreEngine.DefaultCompactionContext.compact(ThroughputController throughputController,
User user) 
 
 
@@ -922,18 +922,18 @@
 
 
 int
-RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
-   User user) 
-
-
-int
 AnnotationReadingPriorityFunction.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,

org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
User user)
 Returns a 'priority' based on the request type.
 
 
+
+int
+RSRpcServices.getPriority(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
+   
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   User user) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 HStore.moveCompatedFilesIntoPlace(CompactionRequest cr,
@@ -1366,14 +1366,14 @@
 
 
 
-static User
-User.create(org.apache.hadoop.security.UserGroupInformation ugi)
+User
+UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying UserGroupInformation 
instance.
 
 
 
-User
-UserProvider.create(org.apache.hadoop.security.UserGroupInformation ugi)
+static User
+User.create(org.apache.hadoop.security.UserGroupInformation ugi)
 Wraps an underlying Us
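
The getRequestUser rows above expose the same lookup three ways: the RpcCallContext accessor, the static RpcServer.getRequestUser(), and RpcServer.Call's override. A small sketch of the null contract spelled out in the description follows; it is only meaningful on a server-side handler thread.

    import org.apache.hadoop.hbase.ipc.RpcServer;
    import org.apache.hadoop.hbase.security.User;

    public class RequestUserSketch {
      // Returns the caller's short name, or "anonymous" when the current
      // RPC carried no credentials (the documented null case).
      static String callerName() {
        User user = RpcServer.getRequestUser();
        return user == null ? "anonymous" : user.getShortName();
      }
    }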

[10/51] [partial] hbase-site git commit: Published site at 1c4d9c8965952cbd17f0afdacbb0c0ac1e5bd1d7.

2017-03-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1fcbc985/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
index dfde2ce..363d1de 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
@@ -109,35 +109,25 @@
 
 
 
-Append
-Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Delete
+Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
 
-Mutation
-Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Increment
+Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
 
-Delete
-Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
-  Permission perms) 
-
-
 Scan
 Scan.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
-
+
 Get
 Get.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
-
-Increment
-Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
-  Permission perms) 
-
 
 Put
 Put.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
@@ -148,6 +138,16 @@
 Query.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
   Permission perms) 
 
+
+Append
+Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+  Permission perms) 
+
+
+Mutation
+Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+  Permission perms) 
+
 
 
 
@@ -158,29 +158,21 @@
 
 
 
-Append
-Append.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
-
-Mutation
-Mutation.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
-
 Delete
 Delete.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
 
+Increment
+Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
+
+
 Scan
 Scan.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
-
+
 Get
 Get.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
 
-
-Increment
-Increment.setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Permission> perms) 
-
 
 Put
 Put.setACL(http://docs.oracle.com/javase/
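
The setACL rows above are reordered but unchanged: every mutation and query type accepts either a single (user, Permission) pair or a Map of them. A brief sketch of the single-user form on a Put follows; the user name and column values are placeholders.

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.security.access.Permission;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CellAclSketch {
      public static void main(String[] args) {
        Put put = new Put(Bytes.toBytes("row1"));
        put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        // Grant "alice" read access on the cells written by this Put.
        put.setACL("alice", new Permission(Permission.Action.READ));
      }
    }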

[10/51] [partial] hbase-site git commit: Published site at 1c4d9c8965952cbd17f0afdacbb0c0ac1e5bd1d7.

2017-03-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/86312ed1/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index 8c57b38..e768cd7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -265,15 +265,15 @@
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/86312ed1/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
index 7bebf07..7f8b4b2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationQueues.html
@@ -151,15 +151,15 @@
 
 
 private ReplicationQueues
-ReplicationSourceManager.replicationQueues 
+ReplicationSource.replicationQueues 
 
 
 private ReplicationQueues
-ReplicationSource.replicationQueues 
+Replication.replicationQueues 
 
 
 private ReplicationQueues
-Replication.replicationQueues 
+ReplicationSourceManager.replicationQueues 
 
 
 private ReplicationQueues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/86312ed1/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
index 373d114..b3d3829 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationTracker.html
@@ -147,11 +147,11 @@
 
 
 private ReplicationTracker
-ReplicationSourceManager.replicationTracker 
+Replication.replicationTracker 
 
 
 private ReplicationTracker
-Replication.replicationTracker 
+ReplicationSourceManager.replicationTracker 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/86312ed1/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
index ffad47a..d70f824 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/WALEntryFilter.html
@@ -185,14 +185,14 @@
 
 
 WALEntryFilter
-ReplicationEndpoint.getWALEntryfilter()
-Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
+BaseReplicationEndpoint.getWALEntryfilter()
+Returns a default set of filters
 
 
 
 WALEntryFilter
-BaseReplicationEndpoint.getWALEntryfilter()
-Returns a default set of filters
+ReplicationEndpoint.getWALEntryfilter()
+Returns a WALEntryFilter to use for filtering out 
WALEntries from the log.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/86312ed1/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
index 9cffb66..4c2ce9f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/class-use/MetricsReplicationSourceImpl.html
@@ -104,11 +104,11 @@
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationSourceSourceImpl.rms 
+MetricsReplicationGlobalSourceSource.rms 
 
 
 private MetricsReplicationSourceImpl
-MetricsReplicationGlobalSourceSource.rms 
+MetricsReplicationSourceSourceImpl.rms 
 
 
 (package private) MetricsReplicationSourceImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/86312ed1/devapidocs/org/apache/hadoop/hbase/replication/regionserver

[10/51] [partial] hbase-site git commit: Published site at da0d74cd27154b76aaa69fe0e5742821f3bfea79.

2016-05-27 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/237089a5/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CandidateGenerator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CandidateGenerator.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CandidateGenerator.html
index 0f9fa5e..ff7632c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CandidateGenerator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CandidateGenerator.html
@@ -103,7 +103,7 @@
 
 
 
-abstract static class StochasticLoadBalancer.CandidateGenerator
+abstract static class StochasticLoadBalancer.CandidateGenerator
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 Generates a candidate action to be applied to the cluster 
for cost function search
 
@@ -212,7 +212,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 StochasticLoadBalancer.CandidateGenerator
-StochasticLoadBalancer.CandidateGenerator()
+StochasticLoadBalancer.CandidateGenerator()
 
 
 
@@ -229,7 +229,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 generate
-abstract BaseLoadBalancer.Cluster.Action generate(BaseLoadBalancer.Cluster cluster)
+abstract BaseLoadBalancer.Cluster.Action generate(BaseLoadBalancer.Cluster cluster)
 
 
 
@@ -238,7 +238,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 pickRandomRegion
-protected int pickRandomRegion(BaseLoadBalancer.Cluster cluster,
+protected int pickRandomRegion(BaseLoadBalancer.Cluster cluster,
int server,
double chanceOfNoSwap)
 From a list of regions pick a random one. Null can be 
returned which
@@ -256,7 +256,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 pickRandomServer
-protected int pickRandomServer(BaseLoadBalancer.Cluster cluster)
+protected int pickRandomServer(BaseLoadBalancer.Cluster cluster)
 
 
 
@@ -265,7 +265,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 pickRandomRack
-protected int pickRandomRack(BaseLoadBalancer.Cluster cluster)
+protected int pickRandomRack(BaseLoadBalancer.Cluster cluster)
 
 
 
@@ -274,7 +274,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 pickOtherRandomServer
-protected int pickOtherRandomServer(BaseLoadBalancer.Cluster cluster,
+protected int pickOtherRandomServer(BaseLoadBalancer.Cluster cluster,
 int serverIndex)
 
 
@@ -284,7 +284,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 pickOtherRandomRack
-protected int pickOtherRandomRack(BaseLoadBalancer.Cluster cluster,
+protected int pickOtherRandomRack(BaseLoadBalancer.Cluster cluster,
   int rackIndex)
 
 
@@ -294,7 +294,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 pickRandomRegions
-protected BaseLoadBalancer.Cluster.Action pickRandomRegions(BaseLoadBalancer.Cluster cluster,
+protected BaseLoadBalancer.Cluster.Action pickRandomRegions(BaseLoadBalancer.Cluster cluster,
 int thisServer,
 int otherServer)
 
@@ -305,7 +305,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getAction
-protected BaseLoadBalancer.Cluster.Action getAction(int fromServer,
+protected BaseLoadBalancer.Cluster.Action getAction(int fromServer,
 int fromRegion,
 int toServer,
 int toRegion)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/237089a5/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFromRegionLoadFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFromRegionLoadFunction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFromRegionLoadFunction.html
index 156b3b9..ba6b9ae 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFromRegionLoadFunction.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFromRegionLoadFunction.html
@@ -108,7 +108,7 @@
 
 
 
-abstract static class StochasticLoadBalancer.CostFromRegionLoadFunction
+abstract static class StochasticLoadBalancer.CostFromRegionLoadFunction
 extends StochasticLoadBalancer.CostFunction
Base class that allows writing cost functions from rolling 
average of some
  number from RegionLoad.
@@ -236,7 +236,7 @@ e
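
The CandidateGenerator methods listed earlier in this message compose naturally: generate() proposes an Action for the cost-function search by picking servers and regions at random. An illustrative sketch using only those methods follows; CandidateGenerator and Cluster are balancer-package internals, so a real generator would live inside StochasticLoadBalancer itself.

    package org.apache.hadoop.hbase.master.balancer;

    class RandomSwapCandidateGenerator extends StochasticLoadBalancer.CandidateGenerator {
      @Override
      BaseLoadBalancer.Cluster.Action generate(BaseLoadBalancer.Cluster cluster) {
        int thisServer = pickRandomServer(cluster);
        int otherServer = pickOtherRandomServer(cluster, thisServer);
        // pickRandomRegions selects a region on each server (possibly none)
        // and wraps the pair in a move/swap Action for the search to score.
        return pickRandomRegions(cluster, thisServer, otherServer);
      }
    }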

[10/51] [partial] hbase-site git commit: Published site at d34f5d4b96e649929bb47e122611537c5726851f.

2016-05-20 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/07527d7e/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
index b8db80e..0dbc5d0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
@@ -114,11 +114,11 @@
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
@@ -128,14 +128,6 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
-  TableName tableName,
-  HColumnDescriptor columnFamily)
-Deprecated. 
-
-
-
-void
 BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
@@ -146,6 +138,14 @@
 
 
 
+
+void
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+  TableName tableName,
+  HColumnDescriptor columnFamily)
+Deprecated. 
+
+
 
 void
 MasterObserver.postAddColumn(ObserverContext ctx,
@@ -160,13 +160,13 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
@@ -180,14 +180,6 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
-TableName tableName,
-HColumnDescriptor columnFamily)
-Deprecated. 
-
-
-
-void
 BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
@@ -198,6 +190,14 @@
 
 
 
+
+void
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+TableName tableName,
+HColumnDescriptor columnFamily)
+Deprecated. 
+
+
 
 void
 MasterObserver.postAddColumnHandler(ObserverContext ctx,
@@ -212,12 +212,12 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddRSGroup(ObserverContext ctx,
+BaseMasterObserver.postAddRSGroup(ObserverContext ctx,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
 
 void
-BaseMasterObserver.postAddRSGroup(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddRSGroup(ObserverContext ctx,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
 
@@ -229,12 +229,12 @@
 
 
 void
-BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
+BaseMasterObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
 
 void
-BaseMasterObserver.postAssign(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
 
@@ -246,12 +246,12 @@
 
 
 void
-BaseMasterAndRegionObserver.postBalance(ObserverContext ctx,
+BaseMasterObserver.postBalance(ObserverContext ctx,
   http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-externa
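
The hook pairs above come from the generated use-page for MasterCoprocessorEnvironment; only the row order changed. A hedged sketch of consuming one such hook follows, extending BaseMasterObserver so every other callback keeps its no-op default; the log message is illustrative.

    import java.io.IOException;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
    import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;

    public class AuditingMasterObserver extends BaseMasterObserver {
      private static final Log LOG = LogFactory.getLog(AuditingMasterObserver.class);

      @Override
      public void postAddColumnFamily(ObserverContext<MasterCoprocessorEnvironment> ctx,
          TableName tableName, HColumnDescriptor columnFamily) throws IOException {
        LOG.info("Added family " + columnFamily.getNameAsString() + " to " + tableName);
      }
    }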

[10/51] [partial] hbase-site git commit: Published site at e0aff109018918514c8f27a8a90159e695e8b542.

2016-05-16 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34d57efe/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
index 2064301..16aac1b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
@@ -404,11 +404,11 @@
 
 
 private Region
-RSRpcServices.RegionScannerHolder.r 
+SplitTransactionImpl.DaughterOpener.r 
 
 
 private Region
-SplitTransactionImpl.DaughterOpener.r 
+RSRpcServices.RegionScannerHolder.r 
 
 
 (package private) Region
@@ -465,11 +465,6 @@
 
 
 Region
-RegionMergeTransactionImpl.execute(Server server,
-  RegionServerServices services) 
-
-
-Region
 RegionMergeTransaction.execute(Server server,
   RegionServerServices services)
 Deprecated. 
@@ -477,13 +472,12 @@
 
 
 
-
+
 Region
-RegionMergeTransactionImpl.execute(Server server,
-  RegionServerServices services,
-  User user) 
+RegionMergeTransactionImpl.execute(Server server,
+  RegionServerServices services) 
 
-
+
 Region
 RegionMergeTransaction.execute(Server server,
   RegionServerServices services,
@@ -491,6 +485,12 @@
 Run the transaction.
 
 
+
+Region
+RegionMergeTransactionImpl.execute(Server server,
+  RegionServerServices services,
+  User user) 
+
 
 private Region
 MemStoreFlusher.getBiggestMemstoreOfRegionReplica(http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true";
 title="class or interface in java.util">SortedMapLong,Region> regionsBySize,
@@ -569,6 +569,11 @@
 
 
 PairOfSameType
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services) 
+
+
+PairOfSameType
 SplitTransaction.execute(Server server,
   RegionServerServices services)
 Deprecated. 
@@ -576,12 +581,13 @@
 
 
 
-
+
 PairOfSameType
-SplitTransactionImpl.execute(Server server,
-  RegionServerServices services) 
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services,
+  User user) 
 
-
+
 PairOfSameType
 SplitTransaction.execute(Server server,
   RegionServerServices services,
@@ -589,12 +595,6 @@
 Run the transaction.
 
 
-
-PairOfSameType
-SplitTransactionImpl.execute(Server server,
-  RegionServerServices services,
-  User user) 
-
 
 (package private) http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true";
 title="class or interface in java.util">SortedMapLong,Region>
 HRegionServer.getCopyOfOnlineRegionsSortedBySize() 
@@ -759,16 +759,16 @@
 
 
 void
+HeapMemoryManager.HeapMemoryTunerChore.flushRequested(FlushType type,
+Region region) 
+
+
+void
 FlushRequestListener.flushRequested(FlushType type,
 Region region)
 Callback which will get called when a flush request is made 
for a region.
 
 
-
-void
-HeapMemoryManager.HeapMemoryTunerChore.flushRequested(FlushType type,
-Region region) 
-
 
 (package private) WAL
 RSRpcServices.getWAL(Region region) 
@@ -859,71 +859,71 @@
 
 
 CompactionRequest
-CompactionRequestor.requestCompaction(Region r,
+CompactSplitThread.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
   CompactionRequest request) 
 
 
 CompactionRequest
-CompactSplitThread.requestCompaction(Region r,
+CompactionRequestor.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
   CompactionRequest request) 
 
 
 CompactionRequest
-CompactionRequestor.requestCompaction(Region r,
+CompactSplitThread.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
-  int pri,
+  int priority,
   CompactionRequest request,
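
The FlushRequestListener row above documents the callback fired when a flush is requested for a region. A minimal sketch of an implementation follows; the interface is region-server internal, and registration with the flusher is not shown.

    import org.apache.hadoop.hbase.regionserver.FlushRequestListener;
    import org.apache.hadoop.hbase.regionserver.FlushType;
    import org.apache.hadoop.hbase.regionserver.Region;

    public class LoggingFlushListener implements FlushRequestListener {
      @Override
      public void flushRequested(FlushType type, Region region) {
        System.out.println("Flush (" + type + ") requested for "
            + region.getRegionInfo().getRegionNameAsString());
      }
    }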

[10/51] [partial] hbase-site git commit: Published site at b353e388bb6bf315818fcde81f11131d6d539c70.

2016-05-13 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96543536/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
index 16aac1b..2064301 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Region.html
@@ -404,11 +404,11 @@
 
 
 private Region
-SplitTransactionImpl.DaughterOpener.r 
+RSRpcServices.RegionScannerHolder.r 
 
 
 private Region
-RSRpcServices.RegionScannerHolder.r 
+SplitTransactionImpl.DaughterOpener.r 
 
 
 (package private) Region
@@ -465,6 +465,11 @@
 
 
 Region
+RegionMergeTransactionImpl.execute(Server server,
+  RegionServerServices services) 
+
+
+Region
 RegionMergeTransaction.execute(Server server,
   RegionServerServices services)
 Deprecated. 
@@ -472,12 +477,13 @@
 
 
 
-
+
 Region
-RegionMergeTransactionImpl.execute(Server server,
-  RegionServerServices services) 
+RegionMergeTransactionImpl.execute(Server server,
+  RegionServerServices services,
+  User user) 
 
-
+
 Region
 RegionMergeTransaction.execute(Server server,
   RegionServerServices services,
@@ -485,12 +491,6 @@
 Run the transaction.
 
 
-
-Region
-RegionMergeTransactionImpl.execute(Server server,
-  RegionServerServices services,
-  User user) 
-
 
 private Region
 MemStoreFlusher.getBiggestMemstoreOfRegionReplica(http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true";
 title="class or interface in java.util">SortedMapLong,Region> regionsBySize,
@@ -569,11 +569,6 @@
 
 
 PairOfSameType
-SplitTransactionImpl.execute(Server server,
-  RegionServerServices services) 
-
-
-PairOfSameType
 SplitTransaction.execute(Server server,
   RegionServerServices services)
 Deprecated. 
@@ -581,13 +576,12 @@
 
 
 
-
+
 PairOfSameType
-SplitTransactionImpl.execute(Server server,
-  RegionServerServices services,
-  User user) 
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services) 
 
-
+
 PairOfSameType
 SplitTransaction.execute(Server server,
   RegionServerServices services,
@@ -595,6 +589,12 @@
 Run the transaction.
 
 
+
+PairOfSameType
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services,
+  User user) 
+
 
 (package private) http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true";
 title="class or interface in java.util">SortedMapLong,Region>
 HRegionServer.getCopyOfOnlineRegionsSortedBySize() 
@@ -759,16 +759,16 @@
 
 
 void
-HeapMemoryManager.HeapMemoryTunerChore.flushRequested(FlushType type,
-Region region) 
-
-
-void
 FlushRequestListener.flushRequested(FlushType type,
 Region region)
 Callback which will get called when a flush request is made 
for a region.
 
 
+
+void
+HeapMemoryManager.HeapMemoryTunerChore.flushRequested(FlushType type,
+Region region) 
+
 
 (package private) WAL
 RSRpcServices.getWAL(Region region) 
@@ -859,71 +859,71 @@
 
 
 CompactionRequest
-CompactSplitThread.requestCompaction(Region r,
+CompactionRequestor.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
   CompactionRequest request) 
 
 
 CompactionRequest
-CompactionRequestor.requestCompaction(Region r,
+CompactSplitThread.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
   CompactionRequest request) 
 
 
 CompactionRequest
-CompactSplitThread.requestCompaction(Region r,
+CompactionRequestor.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
-  int priority,
+  int pri,
   CompactionRequest request,
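As a usage note for the requestCompaction signatures listed above, a minimal Java sketch of invoking the four-argument overload follows; it assumes the caller already holds the internal regionserver objects, and passing null is taken here to let the requestor build its own CompactionRequest (an assumption of this sketch).

import java.io.IOException;

import org.apache.hadoop.hbase.regionserver.CompactionRequestor;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;

public class CompactionTriggerSketch {
  // "why" is a free-form reason used in server logs; a null request asks the
  // requestor to build its own CompactionRequest (assumption of this sketch).
  static CompactionRequest trigger(CompactionRequestor requestor, Region region,
      Store store) throws IOException {
    return requestor.requestCompaction(region, store, "manual trigger", null);
  }
}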

[10/51] [partial] hbase-site git commit: Published site at c9ebcd4e296a31e0da43f513db3f5a8c3929c191.

2016-05-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/84ec2568/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index 1cb53ae..14247af 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -120,17 +120,17 @@
 
 
 void
-MasterObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId)
-Called before an abortProcedure request has been 
processed.
-
+  long procId) 
 
 
 void
-BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
+MasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId) 
+  long procId)
+Called before an abortProcedure request has been 
processed.
+
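A minimal sketch of the preAbortProcedure hook shown above, assuming a coprocessor that extends BaseMasterObserver; throwing from the hook is used here to reject the abortProcedure request before it is processed.

import java.io.IOException;

import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;

// Veto aborts of procedures we care about.
public class NoAbortObserver extends BaseMasterObserver {
  @Override
  public void preAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx,
      ProcedureExecutor<MasterProcedureEnv> procExec, long procId) throws IOException {
    // Throwing here rejects the abortProcedure request before it runs.
    throw new IOException("Aborting procedure " + procId + " is not allowed");
  }
}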
 
 
 
@@ -161,11 +161,11 @@
 
 
 ProcedureExecutor
-MasterServices.getMasterProcedureExecutor() 
+HMaster.getMasterProcedureExecutor() 
 
 
 ProcedureExecutor
-HMaster.getMasterProcedureExecutor() 
+MasterServices.getMasterProcedureExecutor() 
 
 
 
@@ -197,123 +197,123 @@
 
 
 boolean
-EnableTableProcedure.abort(MasterProcedureEnv env) 
+ModifyTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
+CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
+DeleteTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteNamespaceProcedure.abort(MasterProcedureEnv env) 
+CreateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteTableProcedure.abort(MasterProcedureEnv env) 
+TruncateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+DeleteNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateTableProcedure.abort(MasterProcedureEnv env) 
+DisableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-TruncateTableProcedure.abort(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
-boolean
-ModifyTableProcedure.abort(MasterProcedureEnv env) 
+protected boolean
+ServerCrashProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DisableTableProcedure.abort(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
-protected boolean
-ServerCrashProcedure.abort(MasterProcedureEnv env) 
+boolean
+EnableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
+ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 protected boolean
-EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
+ModifyTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-RestoreSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
+CloneSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteTableProcedure.acquireLock(MasterProcedureEnv env) 
+TruncateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
+DisableTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-TruncateTableProcedure.acquireLock(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.acquireLo
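The abort/acquireLock pairs tabulated above all follow one pattern. A purely hypothetical sketch of that contract (not a real HBase class; tryTakeExclusiveLock is an invented placeholder):

import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;

abstract class ProcedureLockingSketch {
  private volatile boolean abortRequested = false;

  // abort(env): record the request and report whether this procedure type
  // supports being aborted; the executor checks between state-machine steps.
  public boolean abort(MasterProcedureEnv env) {
    abortRequested = true;
    return true;
  }

  // acquireLock(env): take the resource lock (e.g. a table-exclusive lock)
  // before any step runs; returning false means "retry later".
  protected boolean acquireLock(MasterProcedureEnv env) {
    return tryTakeExclusiveLock(env); // hypothetical helper for this sketch
  }

  protected abstract boolean tryTakeExclusiveLock(MasterProcedureEnv env);
}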

[10/51] [partial] hbase-site git commit: Published site at 3b74b6f329ad6ebc0d2d2548a7e1290297674529.

2016-05-10 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f58e71c6/devapidocs/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.html
 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.html
index 8e18cd1..b687355 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.html
@@ -100,8 +100,9 @@
 
 
 
-@InterfaceAudience.Private
-public class BigDecimalColumnInterpreter
+@InterfaceAudience.LimitedPrivate(value="Coprocesssor")
+@InterfaceStability.Evolving
+public class BigDecimalColumnInterpreter
 extends ColumnInterpreterBigDecimal,http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in 
java.math">BigDecimal,org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg,org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg,org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg>
 ColumnInterpreter for doing aggregations with BigDecimal 
columns. This class
  is also required on the RegionServer.
@@ -266,7 +267,7 @@ extends 
 
 BigDecimalColumnInterpreter
-public BigDecimalColumnInterpreter()
+public BigDecimalColumnInterpreter()
 
 
 
@@ -283,7 +284,7 @@ extends 
 
 getValue
-public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal getValue(byte[] colFamily,
+public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal getValue(byte[] colFamily,
   byte[] colQualifier,
   Cell kv)
 throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
@@ -301,7 +302,7 @@ extends 
 
 add
-public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal add(http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd1,
+public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal add(http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd1,
  http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd2)
 
 Specified by:
@@ -316,7 +317,7 @@ extends 
 
 compare
-public int compare(http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd1,
+public int compare(http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd1,
   http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd2)
 Description copied from class: ColumnInterpreter
 This takes care of the case where either argument is null; returns 0 
if they are
@@ -337,7 +338,7 @@ extends 
 
 getMaxValue
-public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal getMaxValue()
+public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal getMaxValue()
 Description copied from class: ColumnInterpreter
 Returns the maximum value for this type T
 
@@ -352,7 +353,7 @@ extends 
 
 increment
-public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal increment(http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd)
+public http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal increment(http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal bd)
 
 Specified by:
 increment in
 class ColumnInterpreterBigDecimal,http://docs.oracle.com/j
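A usage sketch for BigDecimalColumnInterpreter with AggregationClient from the same package; it assumes the AggregateImplementation coprocessor is loaded on the table and that cell values were written with Bytes.toBytes(BigDecimal). Table and column names are illustrative.

import java.math.BigDecimal;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.BigDecimalColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class BigDecimalSumSketch {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    AggregationClient aggClient = new AggregationClient(conf);
    Scan scan = new Scan();
    // Restrict the scan to the BigDecimal-encoded column being aggregated.
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("amount"));
    // The sum is computed server-side, region by region, via the coprocessor.
    BigDecimal sum = aggClient.sum(TableName.valueOf("ledger"),
        new BigDecimalColumnInterpreter(), scan);
    System.out.println("sum = " + sum);
  }
}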

[10/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
index e39c4cd..ae29bc8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
@@ -152,11 +152,11 @@
 
 
 private RegionCoprocessorEnvironment
-AggregateImplementation.env 
+MultiRowMutationEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-MultiRowMutationEndpoint.env 
+AggregateImplementation.env 
 
 
 private RegionCoprocessorEnvironment
@@ -1445,11 +1445,11 @@
 
 
 private RegionCoprocessorEnvironment
-BulkDeleteEndpoint.env 
+RowCountEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-RowCountEndpoint.env 
+BulkDeleteEndpoint.env 
 
 
 
@@ -1920,14 +1920,14 @@
 
 
 void
-DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironment e) 
-
-
-void
 VisibilityLabelService.init(RegionCoprocessorEnvironment e)
 System calls this after opening of regions.
 
 
+
+void
+DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironment e) 
+
 
 private void
 VisibilityController.initVisibilityLabelService(RegionCoprocessorEnvironment env) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
index a6609dd..f76650a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
@@ -110,63 +110,69 @@
 
 
 ReplicationEndpoint
+BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
+  ReplicationEndpoint endpoint) 
+
+
+ReplicationEndpoint
 RegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
   ReplicationEndpoint endpoint)
 This will be called after the replication endpoint is 
instantiated.
 
 
-
-ReplicationEndpoint
-BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
-  ReplicationEndpoint endpoint) 
-
 
 void
-RegionServerObserver.postMerge(ObserverContext c,
+BaseRegionServerObserver.postMerge(ObserverContext c,
   Region regionA,
   Region regionB,
-  Region mergedRegion)
-called after the regions merge.
-
+  Region mergedRegion) 
 
 
 void
-BaseRegionServerObserver.postMerge(ObserverContext c,
+RegionServerObserver.postMerge(ObserverContext c,
   Region regionA,
   Region regionB,
-  Region mergedRegion) 
+  Region mergedRegion)
+called after the regions merge.
+
 
 
 void
-RegionServerObserver.postMergeCommit(ObserverContext ctx,
+BaseRegionServerObserver.postMergeCommit(ObserverContext ctx,
   Region regionA,
   Region regionB,
-  Region mergedRegion)
-This will be called after the PONR step as part of the regions 
merge transaction.
-
+  Region mergedRegion) 
 
 
 void
-BaseRegionServerObserver.postMergeCommit(ObserverContext ctx,
+RegionServerObserver.postMergeCommit(ObserverContext ctx,
   Region regionA,
   Region regionB,
-  Region mergedRegion) 
+  Region mergedRegion)
+This will be called after the PONR step as part of the regions 
merge transaction.
+
 
 
 void
+BaseRegionServerObserver.postReplicateLogEntries(ObserverContext ctx,
+  http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List<

[10/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 1689c47..38c22d9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
@@ -166,13 +166,13 @@
 
 
 
-protected InterProcessLock.MetadataHandler
-ZKInterProcessLockBase.handler 
-
-
 private InterProcessLock.MetadataHandler
 ZKInterProcessReadWriteLock.handler 
 
+
+protected InterProcessLock.MetadataHandler
+ZKInterProcessLockBase.handler 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
index 4f4468b..e9a6b35 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
@@ -159,14 +159,14 @@ the order they are declared.
 
 
 private KeepDeletedCells
-ScanInfo.keepDeletedCells 
-
-
-private KeepDeletedCells
 ScanQueryMatcher.keepDeletedCells
 whether to return deleted rows
 
 
+
+private KeepDeletedCells
+ScanInfo.keepDeletedCells 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index 3b95f52..4cec977 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
@@ -85,40 +85,33 @@
 
 
 
-org.apache.hadoop.hbase.io.hfile
-
-Provides implementations of HFile and HFile
- BlockCache.
-
-
-
 org.apache.hadoop.hbase.mapreduce
 
 Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce";>MapReduce
 Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
-
+
 org.apache.hadoop.hbase.mob
  
 
-
+
 org.apache.hadoop.hbase.mob.mapreduce
  
 
-
+
 org.apache.hadoop.hbase.regionserver
  
 
-
+
 org.apache.hadoop.hbase.regionserver.wal
  
 
-
+
 org.apache.hadoop.hbase.security.access
  
 
-
+
 org.apache.hadoop.hbase.util.test
  
 
@@ -608,28 +601,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-
-
-
-Uses of KeyValue in org.apache.hadoop.hbase.io.hfile
-
-Subclasses of KeyValue in org.apache.hadoop.hbase.io.hfile 
-
-Modifier and Type
-Class and Description
-
-
-
-private static class 
-HFileReaderImpl.HFileScannerImpl.ShareableMemoryKeyValue 
-
-
-private static class 
-HFileReaderImpl.HFileScannerImpl.ShareableMemoryNoTagsKeyValue 
-
-
-
-
 
 
 
@@ -814,10 +785,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
-See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
- for details on this method.
-
+DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
@@ -827,17 +795,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
+StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
+See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
+ for details on this method.
+
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-StripeStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
+DefaultStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
   

[10/51] [partial] hbase-site git commit: Published site at 889d89a74c8b1ae6ce7a0f70a63073bbe7708e88.

2016-04-28 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83cfd2ad/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 64d6d2b..d3a9b03 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -158,7 +158,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
+TableRecordReader.createKey() 
 
 
 ImmutableBytesWritable
@@ -166,7 +166,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
 
 
 
@@ -179,17 +179,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+  
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
+  
org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -214,15 +214,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
-  org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key and value to reduce
-
-
-
-void
 GroupingTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
@@ -230,16 +221,25 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Extract the grouping columns from value to construct a new 
key.
 
 
-
+
 void
 RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
   Result values,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter) 
 
+
+void
+IdentityTableMap.map(ImmutableBytesWritable key,
+  Result value,
+  org.apache.hadoop.mapred.OutputCollector output,
+  org.apache.hadoop.mapred.Reporter reporter)
+Pass the key and value to reduce
+
+
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -249,7 +249,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -277,15 +277,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
-  org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key and value to reduce
-
-
-
-void
 GroupingTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
@@ -293,13 +284,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Extract the grouping columns from value to construct a new 
key.
 
 
-
+
 void
 RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
   Result values,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter) 
 
+
+void
+IdentityTableMap.map(ImmutableBytesWritable key,
+  Result value,
+  org.apache.hadoop.mapred.OutputCollector output,
+  org.apache.hadoop.mapred.Reporter reporter)
+Pass the key and value to reduce
+
+
 
 vo
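For the old-style org.apache.hadoop.mapred table mappers above, a small sketch of wiring IdentityTableMap into a JobConf; the table and column names are placeholders, and initJob is assumed to be the static helper IdentityTableMap provides for this setup.

import org.apache.hadoop.hbase.mapred.IdentityTableMap;
import org.apache.hadoop.mapred.JobConf;

public class IdentityScanJobSketch {
  // Configures the job to scan "mytable", emitting each row key/Result pair
  // unchanged via IdentityTableMap (which just passes key and value to reduce).
  public static JobConf configure(JobConf job) {
    IdentityTableMap.initJob("mytable", "cf:qual", IdentityTableMap.class, job);
    return job;
  }
}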

[10/51] [partial] hbase-site git commit: Published site at ce318a2906817058ae7b2fce6e9b54d9d6230f9b.

2016-04-27 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4131cace/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
index 1601586..2d848a4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
@@ -1188,1748 +1188,1749 @@
   * @throws IOException if the specifier is not null,
   *   but failed to find the region
   */
  Region getRegion(
      final RegionSpecifier regionSpecifier) throws IOException {
    ByteString value = regionSpecifier.getValue();
    RegionSpecifierType type = regionSpecifier.getType();
    switch (type) {
      case REGION_NAME:
        byte[] regionName = value.toByteArray();
        String encodedRegionName = HRegionInfo.encodeRegionName(regionName);
        return regionServer.getRegionByEncodedName(regionName, encodedRegionName);
      case ENCODED_REGION_NAME:
        return regionServer.getRegionByEncodedName(value.toStringUtf8());
      default:
        throw new DoNotRetryIOException(
          "Unsupported region specifier type: " + type);
    }
  }

  @VisibleForTesting
  public PriorityFunction getPriority() {
    return priority;
  }

  @VisibleForTesting
  public Configuration getConfiguration() {
    return regionServer.getConfiguration();
  }

  private RegionServerQuotaManager getQuotaManager() {
    return regionServer.getRegionServerQuotaManager();
  }

  void start() {
    rpcServer.start();
  }

  void stop() {
    closeAllScanners();
    rpcServer.stop();
  }

  /**
   * Called to verify that this server is up and running.
   *
   * @throws IOException
   */
  protected void checkOpen() throws IOException {
    if (regionServer.isAborted()) {
      throw new RegionServerAbortedException("Server " + regionServer.serverName + " aborting");
    }
    if (regionServer.isStopped()) {
      throw new RegionServerStoppedException("Server " + regionServer.serverName + " stopping");
    }
    if (!regionServer.fsOk) {
      throw new RegionServerStoppedException("File system not available");
    }
    if (!regionServer.isOnline()) {
      throw new ServerNotRunningYetException("Server is not running yet");
    }
  }

  /**
   * @return list of blocking services and their security info classes that this server supports
   */
  protected List<BlockingServiceAndInterface> getServices() {
    List<BlockingServiceAndInterface> bssi = new ArrayList<BlockingServiceAndInterface>(2);
    bssi.add(new BlockingServiceAndInterface(
      ClientService.newReflectiveBlockingService(this),
      ClientService.BlockingInterface.class));
    bssi.add(new BlockingServiceAndInterface(
      AdminService.newReflectiveBlockingService(this),
      AdminService.BlockingInterface.class));
    return bssi;
  }

  public InetSocketAddress getSocketAddress() {
    return isa;
  }

  @Override
  public int getPriority(RequestHeader header, Message param, User user) {
    return priority.getPriority(header, param, user);
  }

  @Override
  public long getDeadline(RequestHeader header, Message param) {
    return priority.getDeadline(header, param);
  }

  /*
   * Check if an OOME and, if so, abort immediately to avoid creating more objects.
   *
   * @param e
   *
   * @return True if we OOME'd and are aborting.
   */
  @Override
  public boolean checkOOME(final Throwable e) {
    boolean stop = false;
    try {
      if (e instanceof OutOfMemoryError
          || (e.getCause() != null && e.getCause() instanceof OutOfMemoryError)
          || (e.getMessage() != null && e.getMessage().contains(
              "java.lang.OutOfMemoryError"))) {
        stop = true;
        LOG.fatal("Run out of memory; " + getClass().getSimpleName()
          + " will abort itself immediately", e);
      }
    } finally {
      if (stop) {
        Runtime.getRuntime().halt(1);
      }
    }
    return stop;
  }

  /**
   * Close a region on the region server.
   *

[10/51] [partial] hbase-site git commit: Published site at e5d01577e4e989b1023559aa6addd1b37252293e.

2016-04-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/641881f9/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html 
b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
new file mode 100644
index 000..22129e0
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
@@ -0,0 +1,280 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+AsyncFSOutputHelper (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next 
Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.io.asyncfs
+Class 
AsyncFSOutputHelper
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.io.asyncfs.AsyncFSOutputHelper
+
+
+
+
+
+
+
+
+@InterfaceAudience.Private
+public final class AsyncFSOutputHelper
+extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
+Helper class for creating AsyncFSOutput.
+
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Modifier
+Constructor and Description
+
+
+private 
+AsyncFSOutputHelper() 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods 
+
+Modifier and Type
+Method and Description
+
+
+static AsyncFSOutput
+createOutput(org.apache.hadoop.fs.FileSystem fs,
+org.apache.hadoop.fs.Path f,
+boolean overwrite,
+boolean createParent,
+short replication,
+long blockSize,
+io.netty.channel.EventLoop eventLoop)
+Create FanOutOneBlockAsyncDFSOutput
 for DistributedFileSystem, and a simple
+ implementation for other FileSystems which wraps around an 
FSDataOutputStream.
+
+
+
+
+
+
+
+Methods inherited from class java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#clone()"
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#equals(java.lang.Object)"
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#finalize()"
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#getClass()"
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#hashCode()"
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notify()"
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/7/docs/api/java/lang
 /Object.html?is-external=true#notifyAll()" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()"
 title="class or interface in java.lang">toString, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait()"
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait(long)"
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait(long,%20int)"
 title="class or interface in java.lang">wait
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Constructor Detail
+
+
+
+
+
+AsyncFSOutputHelper
+private AsyncFSOutputHelper()
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+createOutput
+public static AsyncFSOutput createOutput(org.apache.hadoop.fs.FileSystem fs,
+ org.apache.hadoop.fs.Path f,
+ boolean overwrite,
+ boolean createParent,
+ short replication,
+ long blockSize,
+ io.netty.channel.EventLoop eventLoop)
+  throws http://docs.oracle.com/javase/7/docs
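A sketch matching the createOutput signature documented above; the replication factor and block size are illustrative assumptions, and the netty event loop setup is just one plausible way to obtain an EventLoop.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.asyncfs.AsyncFSOutput;
import org.apache.hadoop.hbase.io.asyncfs.AsyncFSOutputHelper;

import io.netty.channel.EventLoop;
import io.netty.channel.nio.NioEventLoopGroup;

public class AsyncOutputSketch {
  static AsyncFSOutput open(Configuration conf, Path path) throws Exception {
    FileSystem fs = path.getFileSystem(conf);
    // One event loop is enough for a single output stream in this sketch.
    EventLoop eventLoop = new NioEventLoopGroup(1).next();
    return AsyncFSOutputHelper.createOutput(fs, path,
        true,   // overwrite an existing file
        false,  // do not create missing parent directories
        (short) 3, 128 * 1024 * 1024L, eventLoop);
  }
}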

[10/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
index 11f1f14..095938e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
@@ -246,14 +246,14 @@
 
 
 InternalScanner
-Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 
 
 
 InternalScanner
-StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
index 2ede2d9..5d964e7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
@@ -403,17 +403,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-protected abstract http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
-AbstractMultiFileWriter.writers() 
-
-
 protected http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 DateTieredMultiFileWriter.writers() 
 
-
+
 protected http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 StripeMultiFileWriter.writers() 
 
+
+protected abstract http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
+AbstractMultiFileWriter.writers() 
+
 
 
 
@@ -431,13 +431,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 protected void
-AbstractMultiFileWriter.preCloseWriter(StoreFileWriter writer)
-Subclasses override this method to be called before we 
close the given writer.
-
+StripeMultiFileWriter.preCloseWriter(StoreFileWriter writer) 
 
 
 protected void
-StripeMultiFileWriter.preCloseWriter(StoreFileWriter writer) 
+AbstractMultiFileWriter.preCloseWriter(StoreFileWriter writer)
+Subclasses override this method to be called before we 
close the given writer.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
index 961cd25..064e800 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
@@ -100,11 +100,11 @@
 
 
 private StripeStoreConfig
-StripeStoreFileManager.config 
+StripeStoreEngine.config 
 
 
 private StripeStoreConfig
-StripeStoreEngine.config 
+StripeStoreFileManager.config 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/

[10/51] [partial] hbase-site git commit: Published site at 03f3c392a3ae940f26ddef56e6991a07f2c993f7.

2016-04-21 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4be37af/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 8a99b48..229c596 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -132,111 +132,111 @@
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell v) 
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+ValueFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c) 
+MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c) 
+ColumnPrefixFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v) 
+TimestampsFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
+FilterList.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c) 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+QualifierFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+MultiRowRangeFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v) 
+FuzzyRowFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell v) 
+ColumnRangeFilter.filterKeyValue(Cell kv) 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+RowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored) 
+RandomRowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell v) 
+SkipFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
-Deprecated. 
- 
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored) 
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v) 
+WhileMatchFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+SingleColumnValueFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+FilterWrapper.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell v) 
+DependentColumnFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
 InclusiveStopFilter.filterKeyValue(Cell v) 
 
 
-Filter.ReturnCode
-FilterWrapper.filterKeyValue(Cell v) 
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cell v)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 static Filter.ReturnCode
@@ -303,11 +303,11 @@ the order they are declared.
 
 
 Filter.ReturnCode
-VisibilityController.DeleteVersionVisibilityExpressionFilter.filterKeyValue(Cell cell) 
+VisibilityLabelFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-VisibilityLabelFilter.filterKeyValue(Cell cell) 
+VisibilityController.DeleteVersionVisibilityExpressionFilter.filterKeyValue(Cell cell) 
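A sketch of the filterKeyValue contract tabulated above: a hypothetical FilterBase subclass that keeps only cells whose value starts with a given prefix.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

public class ValuePrefixFilter extends FilterBase {
  private final byte[] prefix;

  public ValuePrefixFilter(byte[] prefix) {
    this.prefix = prefix;
  }

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    byte[] value = CellUtil.cloneValue(cell);
    if (Bytes.startsWith(value, prefix)) {
      return ReturnCode.INCLUDE;  // keep this cell
    }
    return ReturnCode.SKIP;       // drop it, keep scanning the row
  }
}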
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4be37af/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 2075033..964f396 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -174,19 +174,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Get
-Get.setFilter(Filter filter) 
-
-
 Query
 Query.setFilter(Filter filter)
 Apply the specified server-side filter when performing the 
Query.
 
 
-
+
 Scan
 Scan.setFilter(Filter filter) 
 
+
+Get
+Get.setFilter(Filter filter) 
+
 
 
 
@@ -414,11 +414,11 @@ Input/OutputFormats, a table indexing MapReduce job, a

[10/51] [partial] hbase-site git commit: Published site at 3e1bdccc53a0a41fc231cf5aec358ace367b0a62.

2016-04-20 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/09017087/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.LimitScope.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.LimitScope.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.LimitScope.html
index ee668a4..6bc9821 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.LimitScope.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.LimitScope.html
@@ -165,35 +165,35 @@ the order they are declared.
 
 
 (package private) boolean
-NoLimitScannerContext.checkAnyLimitReached(ScannerContext.LimitScope checkerScope) 
+ScannerContext.checkAnyLimitReached(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
-ScannerContext.checkAnyLimitReached(ScannerContext.LimitScope checkerScope) 
+NoLimitScannerContext.checkAnyLimitReached(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
-NoLimitScannerContext.checkBatchLimit(ScannerContext.LimitScope checkerScope) 
+ScannerContext.checkBatchLimit(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
-ScannerContext.checkBatchLimit(ScannerContext.LimitScope checkerScope) 
+NoLimitScannerContext.checkBatchLimit(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
-NoLimitScannerContext.checkSizeLimit(ScannerContext.LimitScope checkerScope) 
+ScannerContext.checkSizeLimit(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
-ScannerContext.checkSizeLimit(ScannerContext.LimitScope checkerScope) 
+NoLimitScannerContext.checkSizeLimit(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
-NoLimitScannerContext.checkTimeLimit(ScannerContext.LimitScope checkerScope) 
+ScannerContext.checkTimeLimit(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
-ScannerContext.checkTimeLimit(ScannerContext.LimitScope checkerScope) 
+NoLimitScannerContext.checkTimeLimit(ScannerContext.LimitScope checkerScope) 
 
 
 (package private) boolean
@@ -228,11 +228,11 @@ the order they are declared.
 
 
 (package private) void
-NoLimitScannerContext.setSizeLimitScope(ScannerContext.LimitScope scope) 
+ScannerContext.setSizeLimitScope(ScannerContext.LimitScope scope) 
 
 
 (package private) void
-ScannerContext.setSizeLimitScope(ScannerContext.LimitScope scope) 
+NoLimitScannerContext.setSizeLimitScope(ScannerContext.LimitScope scope) 
 
 
 (package private) void
@@ -247,11 +247,11 @@ the order they are declared.
 
 
 (package private) void
-NoLimitScannerContext.setTimeLimitScope(ScannerContext.LimitScope scope) 
+ScannerContext.setTimeLimitScope(ScannerContext.LimitScope scope) 
 
 
 (package private) void
-ScannerContext.setTimeLimitScope(ScannerContext.LimitScope scope) 
+NoLimitScannerContext.setTimeLimitScope(ScannerContext.LimitScope scope) 
 
 
 (package private) void

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/09017087/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.NextState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.NextState.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.NextState.html
index 37b4a41..9d0e289 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.NextState.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.NextState.html
@@ -116,14 +116,14 @@
 
 
 (package private) ScannerContext.NextState
-NoLimitScannerContext.setScannerState(ScannerContext.NextState state) 
-
-
-(package private) ScannerContext.NextState
 ScannerContext.setScannerState(ScannerContext.NextState state)
 Note that this is not a typical setter.
 
 
+
+(package private) ScannerContext.NextState
+NoLimitScannerContext.setScannerState(ScannerContext.NextState state) 
+
 
 static ScannerContext.NextState
 ScannerContext.NextState.valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
@@ -156,14 +156,14 @@ the order they are declared.
 
 
 (package private) ScannerContext.NextState
-NoLimitScannerContext.setScannerState(ScannerContext.NextState state) 
-
-
-(package private) ScannerContext.NextState
 ScannerContext.setScannerState(ScannerContext.NextState state)
 Note that this is not a typical setter.
 
 
+
+(package private) ScannerContext.NextState
+NoLimitScannerContext.setScannerState(ScannerContext.NextState state) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/09017087/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.html
--
diff --git 
a/devap

[10/51] [partial] hbase-site git commit: Published site at bd3b9753a9a792b402064ec4fabf2dc3c2eb41f1.

2016-04-19 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0d22bc0c/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileManager.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileManager.html
index 3d10cca..dceaf97 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileManager.html
@@ -92,7 +92,7 @@
 
 
 @InterfaceAudience.Private
-public interface StoreFileManager
+public interface StoreFileManager
 Manages the store files and basic metadata about them that 
determines the logical structure
  (e.g. what files to return for scan, how to determine split point, and such).
  Does NOT affect the physical structure of files in HDFS.
@@ -173,41 +173,45 @@ public interface getStoreCompactionPriority() 
 
 
+http://docs.oracle.com/javase/7/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator
+getStoreFileComparator() 
+
+
 int
 getStorefileCount()
 Returns the number of files currently in use.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 getStorefiles()
 Gets the snapshot of the store files currently in use.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 getUnneededFiles(long maxTs,
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List filesCompacting) 
 
-
+
 void
 insertNewFiles(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection sfs)
 Adds new files, either from MemStore flush or bulk 
insert, into the structure.
 
 
-
+
 void
 loadFiles(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List storeFiles)
 Loads the initial store files into empty 
StoreFileManager.
 
 
-
+
 void
 removeCompactedFiles(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection compactedFiles)
 Remove the compacted files
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
   KeyValue targetKey,
@@ -236,7 +240,7 @@ public interface 
 
 loadFiles
-void loadFiles(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List storeFiles)
+void loadFiles(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List storeFiles)
 Loads the initial store files into empty 
StoreFileManager.
 Parameters:storeFiles - The files 
to load.
 
@@ -247,7 +251,7 @@ public interface 
 
 insertNewFiles
-void insertNewFiles(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection sfs)
+void insertNewFiles(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection sfs)
 throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Adds new files, either from MemStore flush or bulk 
insert, into the structure.
 Parameters: sfs - New 
store files.
@@ -261,7 +265,7 @@ public interface 
 
 addCompactionResults
-void addCompactionResults(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection compactedFiles,
+void addCompactionResults(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection compactedFiles,
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection results)
   throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Adds only the 

[10/51] [partial] hbase-site git commit: Published site at f2e0aca2b6925cdd1f2896052f4b4e847ee4e1fd.

2016-04-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9f9a078f/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
index 4ab2ad2..cb92b06 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/CompareFilter.CompareOp.html
@@ -241,147 +241,147 @@ service.
 
 
 boolean
-BaseRegionObserver.postCheckAndDelete(ObserverContext e,
+RegionObserver.postCheckAndDelete(ObserverContext c,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Delete delete,
-boolean result) 
+boolean result)
+Called after checkAndDelete
+
 
 
 boolean
-RegionObserver.postCheckAndDelete(ObserverContext c,
+BaseRegionObserver.postCheckAndDelete(ObserverContext e,
 byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 ByteArrayComparable comparator,
 Delete delete,
-boolean result)
-Called after checkAndDelete
-
+boolean result) 
 
 
 boolean
-BaseRegionObserver.postCheckAndPut(ObserverContext e,
+RegionObserver.postCheckAndPut(ObserverContext c,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Put put,
-  boolean result) 
+  boolean result)
+Called after checkAndPut
+
 
 
 boolean
-RegionObserver.postCheckAndPut(ObserverContext c,
+BaseRegionObserver.postCheckAndPut(ObserverContext e,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Put put,
-  boolean result)
-Called after checkAndPut
-
+  boolean result) 
 
 
 boolean
-BaseRegionObserver.preCheckAndDelete(ObserverContext e,
+RegionObserver.preCheckAndDelete(ObserverContext c,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Delete delete,
-  boolean result) 
+  boolean result)
+Called before checkAndDelete.
+
 
 
 boolean
-RegionObserver.preCheckAndDelete(ObserverContext c,
+BaseRegionObserver.preCheckAndDelete(ObserverContext e,
   byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
   ByteArrayComparable comparator,
   Delete delete,
-  boolean result)
-Called before checkAndDelete.
-
+  boolean result) 
 
 
 boolean
-BaseRegionObserver.preCheckAndDeleteAfterRowLock(ObserverContext e,
+RegionObserver.preCheckAndDeleteAfterRowLock(ObserverContext c,
   byte[] row,
   byte[] family,
   
byte[] qualifier,
   C
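A sketch of the preCheckAndPut hook from the table above, extending BaseRegionObserver; it logs the comparison and returns the incoming result so the outcome is left to HBase.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;

public class CheckAndPutAuditor extends BaseRegionObserver {
  @Override
  public boolean preCheckAndPut(ObserverContext<RegionCoprocessorEnvironment> e,
      byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
      ByteArrayComparable comparator, Put put, boolean result) throws IOException {
    System.out.println("checkAndPut with op " + compareOp
        + " on row of length " + row.length);
    return result; // returning the incoming value leaves the outcome to HBase
  }
}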

[10/51] [partial] hbase-site git commit: Published site at 7efb9edecbdf8b35046230575d504e4caeb80f34.

2016-04-14 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3c6f3528/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 97a2309..8d2fbd1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -144,16 +144,16 @@
 
 
 
-static HTableDescriptor
-HTableDescriptor.parseFrom(byte[] bytes) 
+static ClusterId
+ClusterId.parseFrom(byte[] bytes) 
 
 
 static HColumnDescriptor
 HColumnDescriptor.parseFrom(byte[] bytes) 
 
 
-static ClusterId
-ClusterId.parseFrom(byte[] bytes) 
+static HTableDescriptor
+HTableDescriptor.parseFrom(byte[] bytes) 
 
 
 static HRegionInfo
@@ -257,141 +257,141 @@
 ByteArrayComparable.parseFrom(byte[] pbBytes) 
 
 
-static QualifierFilter
-QualifierFilter.parseFrom(byte[] pbBytes) 
+static BinaryPrefixComparator
+BinaryPrefixComparator.parseFrom(byte[] pbBytes) 
 
 
-static WhileMatchFilter
-WhileMatchFilter.parseFrom(byte[] pbBytes) 
+static FuzzyRowFilter
+FuzzyRowFilter.parseFrom(byte[] pbBytes) 
 
 
-static RandomRowFilter
-RandomRowFilter.parseFrom(byte[] pbBytes) 
+static BitComparator
+BitComparator.parseFrom(byte[] pbBytes) 
 
 
-static ColumnCountGetFilter
-ColumnCountGetFilter.parseFrom(byte[] pbBytes) 
+static MultipleColumnPrefixFilter
+MultipleColumnPrefixFilter.parseFrom(byte[] pbBytes) 
 
 
-static DependentColumnFilter
-DependentColumnFilter.parseFrom(byte[] pbBytes) 
+static RowFilter
+RowFilter.parseFrom(byte[] pbBytes) 
 
 
-static KeyOnlyFilter
-KeyOnlyFilter.parseFrom(byte[] pbBytes) 
+static FamilyFilter
+FamilyFilter.parseFrom(byte[] pbBytes) 
 
 
-static FuzzyRowFilter
-FuzzyRowFilter.parseFrom(byte[] pbBytes) 
+static SkipFilter
+SkipFilter.parseFrom(byte[] pbBytes) 
 
 
-static SingleColumnValueFilter
-SingleColumnValueFilter.parseFrom(byte[] pbBytes) 
+static PrefixFilter
+PrefixFilter.parseFrom(byte[] pbBytes) 
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[] pbBytes) 
+static FilterList
+FilterList.parseFrom(byte[] pbBytes) 
 
 
-static LongComparator
-LongComparator.parseFrom(byte[] pbBytes) 
+static SubstringComparator
+SubstringComparator.parseFrom(byte[] pbBytes) 
 
 
-static MultipleColumnPrefixFilter
-MultipleColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static Filter
+Filter.parseFrom(byte[] pbBytes)
+Concrete implementers can signal a failure condition in 
their code by throwing an
+ http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException.
+
 
 
-static FilterList
-FilterList.parseFrom(byte[] pbBytes) 
+static DependentColumnFilter
+DependentColumnFilter.parseFrom(byte[] pbBytes) 
 
 
-static BinaryComparator
-BinaryComparator.parseFrom(byte[] pbBytes) 
+static SingleColumnValueFilter
+SingleColumnValueFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static InclusiveStopFilter
+InclusiveStopFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnRangeFilter
-ColumnRangeFilter.parseFrom(byte[] pbBytes) 
+static BinaryComparator
+BinaryComparator.parseFrom(byte[] pbBytes) 
 
 
-static SubstringComparator
-SubstringComparator.parseFrom(byte[] pbBytes) 
+static TimestampsFilter
+TimestampsFilter.parseFrom(byte[] pbBytes) 
 
 
-static PrefixFilter
-PrefixFilter.parseFrom(byte[] pbBytes) 
+static MultiRowRangeFilter
+MultiRowRangeFilter.parseFrom(byte[] pbBytes) 
 
 
-static BitComparator
-BitComparator.parseFrom(byte[] pbBytes) 
+static ColumnPaginationFilter
+ColumnPaginationFilter.parseFrom(byte[] pbBytes) 
 
 
-static ValueFilter
-ValueFilter.parseFrom(byte[] pbBytes) 
+static LongComparator
+LongComparator.parseFrom(byte[] pbBytes) 
 
 
-static Filter
-Filter.parseFrom(byte[] pbBytes)
-Concrete implementers can signal a failure condition in 
their code by throwing an
- http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException.
-
+static WhileMatchFilter
+WhileMatchFilter.parseFrom(byte[] pbBytes) 
 
 
-static MultiRowRangeFilter
-MultiRowRangeFilter.parseFrom(byte[] pbBytes) 
+static PageFilter
+PageFilter.parseFrom(byte[] pbBytes) 
 
 
-static FirstKeyOnlyFilter
-FirstKeyOnlyFilter.parseFrom(byte[] pbBytes) 
+static SingleColumnValueExcludeFilter
+SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes) 
 
 
+static ColumnPrefixFilter
+ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+
+
 static FirstKeyValueMatchingQualifiersFilter
 FirstKeyValueMatchingQualifiersFilter.parseFrom(byte[] pbBytes)
 Deprecated. 
  
 
-
-static PageFilter
-PageFilter.parseFrom(byte[] pbBytes) 
-
 
-static TimestampsFilter
-Ti
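For orientation: this class-use page indexes the parseFrom implementations that declare DeserializationException, and the Filter.parseFrom row states the contract (concrete implementers signal failure by throwing). A hedged sketch of that contract for a custom filter; MyFilter and MyFilterProtos are hypothetical, only parseFrom and DeserializationException come from the page.

// Deserialize the byte[] produced by toByteArray(); wrap protobuf parse
// failures in the documented exception type.
public static MyFilter parseFrom(final byte[] pbBytes)
    throws DeserializationException {
  MyFilterProtos.MyFilter proto;  // hypothetical generated message
  try {
    proto = MyFilterProtos.MyFilter.parseFrom(pbBytes);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw new DeserializationException(e);
  }
  return new MyFilter(proto.getPrefix().toByteArray());
}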

[10/51] [partial] hbase-site git commit: Published site at ae7e5e29f9be9999b495b7e30331b351b1888d8f.

2016-04-13 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31709f2f/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
index d216194..dd4bb45 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
@@ -200,42 +200,42 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HMobStore.compact(CompactionContext compaction,
+Store.compact(CompactionContext compaction,
   ThroughputController throughputController)
-The compaction in the mob store.
+Deprecated. 
+see compact(CompactionContext, ThroughputController, 
User)
+
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HStore.compact(CompactionContext compaction,
+HMobStore.compact(CompactionContext compaction,
   ThroughputController throughputController)
-Compact the StoreFiles.
+The compaction in the mob store.
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-Store.compact(CompactionContext compaction,
+HStore.compact(CompactionContext compaction,
   ThroughputController throughputController)
-Deprecated. 
-see compact(CompactionContext, ThroughputController, 
User)
-
+Compact the StoreFiles.
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HStore.compact(CompactionContext compaction,
+Store.compact(CompactionContext compaction,
   ThroughputController throughputController,
   User user) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-Store.compact(CompactionContext compaction,
+HStore.compact(CompactionContext compaction,
   ThroughputController throughputController,
   User user) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
   User user) 
 
 
@@ -245,7 +245,7 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
   User user) 
 
 
@@ -266,13 +266,6 @@
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
-  long cacheFlushId,
-  MonitoredTask status,
-  ThroughputController throughputController) 
-
-
 abstract http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
 StoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
   long cacheFlushSeqNum,
@@ -281,13 +274,20 @@
 Turns a snapshot of memstore into a set of store 
files.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
 StripeStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
   long cacheFlushSeqNum,
   MonitoredTask status,
   ThroughputController throughputController) 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
+DefaultStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
+  long cacheFlushId,
+  MonitoredTask status,
+  ThroughputController throughputController) 
+
 
 protected void
 StoreFlusher.performFlush(InternalScanner scanner,
@@ -476,12 +476,12 @@
 
 
 static ThroughputController
-FlushThroughputControllerFactory.create(RegionServerServices serv
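For orientation: the rows above deprecate the two-argument Store.compact in favor of the overload that also carries the requesting User. A minimal migration sketch, assuming store, compaction, throughputController, and user are in scope; the List element type is an assumption, since the generated page strips generics.

// Before (deprecated per the rows above):
//   store.compact(compaction, throughputController);
// After: pass the acting user explicitly.
List<StoreFile> compacted = store.compact(compaction, throughputController, user);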

[10/51] [partial] hbase-site git commit: Published site at ff9c92e16831fe350904ac99f92619fb97ba2bef.

2016-04-12 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c0cc30a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
index dd4bb45..d216194 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
@@ -200,42 +200,42 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-Store.compact(CompactionContext compaction,
+HMobStore.compact(CompactionContext compaction,
   ThroughputController throughputController)
-Deprecated. 
-see compact(CompactionContext, ThroughputController, 
User)
-
+The compaction in the mob store.
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HMobStore.compact(CompactionContext compaction,
+HStore.compact(CompactionContext compaction,
   ThroughputController throughputController)
-The compaction in the mob store.
+Compact the StoreFiles.
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HStore.compact(CompactionContext compaction,
+Store.compact(CompactionContext compaction,
   ThroughputController throughputController)
-Compact the StoreFiles.
+Deprecated. 
+see compact(CompactionContext, ThroughputController, 
User)
+
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-Store.compact(CompactionContext compaction,
+HStore.compact(CompactionContext compaction,
   ThroughputController throughputController,
   User user) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HStore.compact(CompactionContext compaction,
+Store.compact(CompactionContext compaction,
   ThroughputController throughputController,
   User user) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
+DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
   User user) 
 
 
@@ -245,7 +245,7 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DateTieredStoreEngine.DateTieredCompactionContext.compact(ThroughputController throughputController,
+StripeStoreEngine.StripeCompaction.compact(ThroughputController throughputController,
   User user) 
 
 
@@ -266,6 +266,13 @@
 
 
 
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
+DefaultStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
+  long cacheFlushId,
+  MonitoredTask status,
+  ThroughputController throughputController) 
+
+
 abstract http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
 StoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
   long cacheFlushSeqNum,
@@ -274,20 +281,13 @@
 Turns a snapshot of memstore into a set of store 
files.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
 StripeStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
   long cacheFlushSeqNum,
   MonitoredTask status,
   ThroughputController throughputController) 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
-  long cacheFlushId,
-  MonitoredTask status,
-  ThroughputController throughputController) 
-
 
 protected void
 StoreFlusher.performFlush(InternalScanner scanner,
@@ -476,12 +476,12 @@
 
 
 static ThroughputController
-CompactionThroughputControllerFactory.create(RegionServerServices
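For orientation: the flushSnapshot rows above share one contract, "Turns a snapshot of memstore into a set of store files." A hedged call-site illustration, assuming all four arguments are in scope; the parameter comments paraphrase the surrounding docs and the List element type is an assumption.

List<Path> flushedFiles = storeFlusher.flushSnapshot(
    snapshot,             // MemStoreSnapshot: frozen view of the memstore
    cacheFlushSeqNum,     // WAL sequence id this flush covers
    status,               // MonitoredTask used for progress reporting
    throughputController  // throttles flush write bandwidth
);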

[10/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
index d0bdf04..a01cc24 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
@@ -185,9 +185,7 @@
 
 
 void
-MasterObserver.postAbortProcedure(ObserverContext ctx)
-Called after an abortProcedure request has been 
processed.
-
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
@@ -195,18 +193,16 @@
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+MasterObserver.postAbortProcedure(ObserverContext ctx)
+Called after an abortProcedure request has been 
processed.
+
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
@@ -219,19 +215,21 @@
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
@@ -241,17 +239,17 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
-HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+HColumnDescriptor columnFamily) 
 
 
 void
@@ -261,20 +259,18 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+MasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
-HColumnDescriptor columnFamily) 
+HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- MasterObserver.postAddColumnFamilyHandler(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
@@ -287,18 +283,20 @@
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+MasterObserver.postAddColum
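For orientation: the deprecation notes above (HBASE-13645) replace the postAddColumn* hooks with postAddColumnFamily*. A minimal sketch of targeting the replacement hook; the class name is illustrative.

import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

public class ColumnFamilyAuditor extends BaseMasterObserver {
  @Override
  public void postAddColumnFamily(ObserverContext<MasterCoprocessorEnvironment> ctx,
      TableName tableName, HColumnDescriptor columnFamily) throws IOException {
    // Runs after the new column family has been created; postAddColumn is
    // the deprecated spelling of this same hook.
  }
}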

[10/51] [partial] hbase-site git commit: Published site at 6ea4994569e05ff44e0fa571e053cef828ab57ed.

2016-04-08 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db94a639/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index 60c1334..e5f8bce 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -114,9 +114,11 @@
 
 
 void
-BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
+MasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId) 
+  long procId)
+Called before an abortProcedure request has been 
processed.
+
 
 
 void
@@ -126,11 +128,9 @@
 
 
 void
-MasterObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId)
-Called before an abortProcedure request has been 
processed.
-
+  long procId) 
 
 
 
@@ -197,123 +197,123 @@
 
 
 boolean
-CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteTableProcedure.abort(MasterProcedureEnv env) 
+CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyTableProcedure.abort(MasterProcedureEnv env) 
+DeleteNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-EnableTableProcedure.abort(MasterProcedureEnv env) 
+CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
-protected boolean
-ServerCrashProcedure.abort(MasterProcedureEnv env) 
+boolean
+DeleteTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteNamespaceProcedure.abort(MasterProcedureEnv env) 
+CreateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateTableProcedure.abort(MasterProcedureEnv env) 
+EnableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
+TruncateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
+ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+ModifyTableProcedure.abort(MasterProcedureEnv env) 
 
 
-boolean
-DisableTableProcedure.abort(MasterProcedureEnv env) 
+protected boolean
+ServerCrashProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
+DisableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-TruncateTableProcedure.abort(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 protected boolean
-CloneSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteTableProcedure.acquireLock(MasterProcedureEnv env) 
+CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyTableProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
+CloneSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ServerCrashProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
+EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+TruncateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-RestoreSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
+ModifyColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-AddColumnFamilyProcedure.acquireLock(MasterProcedure
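For orientation: the preAbortProcedure rows above describe a veto point that runs before an abortProcedure request is processed. A hedged sketch of using it inside a MasterObserver; the permission check is an assumption, not shipped behavior.

@Override
public void preAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx,
    ProcedureExecutor<MasterProcedureEnv> procEnv, long procId) throws IOException {
  // Throwing here vetoes the abort; returning normally lets it proceed.
  if (!callerIsAdmin(ctx)) {  // callerIsAdmin() is hypothetical
    throw new IOException("abort of procedure " + procId + " denied");
  }
}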

[10/51] [partial] hbase-site git commit: Published site at ac8cd373ebe81ed24cab6737154c6902c05ff059.

2016-04-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b0a04862/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
index 2fed958..6a68522 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
@@ -96,53 +96,53 @@
 
 
 TableProcedureInterface.TableOperationType
-TableProcedureInterface.getTableOperationType()
-Given an operation type we can take decisions about what to 
do with pending operations.
-
+CloneSnapshotProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DeleteNamespaceProcedure.getTableOperationType() 
+TableProcedureInterface.getTableOperationType()
+Given an operation type we can take decisions about what to 
do with pending operations.
+
 
 
 TableProcedureInterface.TableOperationType
-DisableTableProcedure.getTableOperationType() 
+DeleteTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-EnableTableProcedure.getTableOperationType() 
+ModifyTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CreateTableProcedure.getTableOperationType() 
+EnableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-TruncateTableProcedure.getTableOperationType() 
+DeleteNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DeleteTableProcedure.getTableOperationType() 
+CreateTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CloneSnapshotProcedure.getTableOperationType() 
+ModifyNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CreateNamespaceProcedure.getTableOperationType() 
+RestoreSnapshotProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyNamespaceProcedure.getTableOperationType() 
+ModifyColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyTableProcedure.getTableOperationType() 
+AddColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-AddColumnFamilyProcedure.getTableOperationType() 
+DisableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
@@ -150,11 +150,11 @@
 
 
 TableProcedureInterface.TableOperationType
-RestoreSnapshotProcedure.getTableOperationType() 
+CreateNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyColumnFamilyProcedure.getTableOperationType() 
+TruncateTableProcedure.getTableOperationType() 
 
 
 static TableProcedureInterface.TableOperationType

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b0a04862/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index aab927f..bcbb26e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -147,9 +147,9 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.master.procedure.ServerProcedureInterface.ServerOperationType
 org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.MarkRegionOfflineOpResult
 org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType
-org.apache.hadoop.hbase.master.procedure.ServerProcedureInterface.ServerOperationType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b0a04862/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
 
b/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
index 8628091..d0ec8d7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/clas
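For orientation: every getTableOperationType row above satisfies the contract quoted from TableProcedureInterface, reporting one operation type so the scheduler can decide what to do with pending operations on the same table. A minimal sketch for a hypothetical table-creating procedure, using the enum this page indexes; the CREATE constant is this editor's recollection, not quoted from the page.

@Override
public TableProcedureInterface.TableOperationType getTableOperationType() {
  // Report CREATE so concurrent operations on this table can be ordered.
  return TableProcedureInterface.TableOperationType.CREATE;
}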

[10/51] [partial] hbase-site git commit: Published site at 25419d8b18dd8f35a102614cd31b274659f747ef.

2016-04-01 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce2de59a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 9c2777c..8de8e73 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -624,24 +624,24 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode
-org.apache.hadoop.hbase.regionserver.MemStoreScanner.Type
-org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
 org.apache.hadoop.hbase.regionserver.SplitTransaction.SplitTransactionPhase
-org.apache.hadoop.hbase.regionserver.ScanType
-org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
-org.apache.hadoop.hbase.regionserver.FlushType
+org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.RegionMergeTransactionPhase
 org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
+org.apache.hadoop.hbase.regionserver.Region.Operation
+org.apache.hadoop.hbase.regionserver.MemStoreScanner.Type
+org.apache.hadoop.hbase.regionserver.FlushType
 org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
+org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
 org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteCompare
+org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
+org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode
 org.apache.hadoop.hbase.regionserver.RegionOpeningState
-org.apache.hadoop.hbase.regionserver.Region.Operation
 org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.RegionMergeTransactionPhase
-org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
-org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteCompare
+org.apache.hadoop.hbase.regionserver.ScanType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce2de59a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
index 0faceeb..756d9eb 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
@@ -1547,7 +1547,10 @@ implements 
 postAppend
 private long postAppend(WAL.Entry e,
-  long elapsedTime)
+  long elapsedTime)
+ throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
+Throws:
+http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce2de59a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.html
index af6f293..ffa088b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.html
@@ -105,7 +105,7 @@
 
 
 @InterfaceAudience.Private
-public class MetricsWAL
+public class MetricsWAL
 extends WALActionsListener.Base
 Class used to push numbers about the WAL into the metrics 
subsystem.  This will take a
  single function call and turn it into multiple manipulations of the hadoop 
metrics system.
@@ -193,8 +193,10 @@ extends 
 void
-postAppend(long size,
-long time)
+postAppend(long size,
+long time,
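For orientation: the MetricsWAL hunk above widens postAppend beyond (size, time), matching the AbstractFSWAL.postAppend change that now declares IOException. A hedged sketch of a listener written against the widened hook; the WALKey/WALEdit parameter names are assumptions inferred from the WALActionsListener API of this era, and the class is illustrative.

public class ByteCountingWALListener extends WALActionsListener.Base {
  private final java.util.concurrent.atomic.AtomicLong appendedBytes =
      new java.util.concurrent.atomic.AtomicLong();

  @Override
  public void postAppend(long size, long time, WALKey logKey, WALEdit logEdit)
      throws IOException {
    // One function call fans out into metric updates, as the MetricsWAL
    // class comment describes; here we just accumulate appended bytes.
    appendedBytes.addAndGet(size);
  }
}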

[10/51] [partial] hbase-site git commit: Published site at d6fd85945130516ba10fe4854ce080e5a2329983.

2016-03-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8d69509/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionServerMonitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionServerMonitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionServerMonitor.html
index 9c063a3..9cb1f08 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionServerMonitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionServerMonitor.html
@@ -54,1212 +54,1218 @@
 046import 
org.apache.hadoop.conf.Configuration;
 047import 
org.apache.hadoop.hbase.AuthUtil;
 048import 
org.apache.hadoop.hbase.ChoreService;
-049import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-050import 
org.apache.hadoop.hbase.HBaseConfiguration;
-051import 
org.apache.hadoop.hbase.HColumnDescriptor;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionInfo;
-054import 
org.apache.hadoop.hbase.HRegionLocation;
-055import 
org.apache.hadoop.hbase.HTableDescriptor;
-056import 
org.apache.hadoop.hbase.MetaTableAccessor;
-057import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-058import 
org.apache.hadoop.hbase.ScheduledChore;
-059import 
org.apache.hadoop.hbase.ServerName;
-060import 
org.apache.hadoop.hbase.TableName;
-061import 
org.apache.hadoop.hbase.TableNotEnabledException;
-062import 
org.apache.hadoop.hbase.TableNotFoundException;
-063import 
org.apache.hadoop.hbase.client.Admin;
-064import 
org.apache.hadoop.hbase.client.Connection;
-065import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-066import 
org.apache.hadoop.hbase.client.Get;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.client.RegionLocator;
-069import 
org.apache.hadoop.hbase.client.ResultScanner;
-070import 
org.apache.hadoop.hbase.client.Scan;
-071import 
org.apache.hadoop.hbase.client.Table;
-072import 
org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
-073import 
org.apache.hadoop.hbase.tool.Canary.RegionTask.TaskType;
-074import 
org.apache.hadoop.hbase.util.Bytes;
-075import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-076import 
org.apache.hadoop.hbase.util.Pair;
-077import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-078import 
org.apache.hadoop.hbase.util.RegionSplitter;
-079import 
org.apache.hadoop.util.GenericOptionsParser;
-080import org.apache.hadoop.util.Tool;
-081import 
org.apache.hadoop.util.ToolRunner;
-082
-083/**
-084 * HBase Canary Tool that can be 
used to do
-085 * "canary monitoring" of a running HBase 
cluster.
-086 *
-087 * Here are two modes
-088 * 1. region mode - For each region, tries 
to get one row per column family
-089 * and outputs some information about 
failure or latency.
-090 *
-091 * 2. regionserver mode - For each 
regionserver, tries to get one row from one table
-092 * selected randomly and outputs some 
information about failure or latency.
-093 */
-094public final class Canary implements Tool 
{
-095  // Sink interface used by the canary to 
output information
-096  public interface Sink {
-097public long getReadFailureCount();
-098public long incReadFailureCount();
-099public void 
publishReadFailure(HRegionInfo region, Exception e);
-100public void 
publishReadFailure(HRegionInfo region, HColumnDescriptor column, Exception 
e);
-101public void 
publishReadTiming(HRegionInfo region, HColumnDescriptor column, long msTime);
-102public long getWriteFailureCount();
-103public void 
publishWriteFailure(HRegionInfo region, Exception e);
-104public void 
publishWriteFailure(HRegionInfo region, HColumnDescriptor column, Exception 
e);
-105public void 
publishWriteTiming(HRegionInfo region, HColumnDescriptor column, long 
msTime);
-106  }
-107  // new extended sink for output 
regionserver mode info
-108  // do not change the Sink interface 
directly due to maintaining the API
-109  public interface ExtendedSink extends 
Sink {
-110public void publishReadFailure(String 
table, String server);
-111public void publishReadTiming(String 
table, String server, long msTime);
-112  }
-113
-114  // Simple implementation of canary sink 
that allows plotting timings
-115  // or failures to a file or standard output.
-116  public static class StdOutSink 
implements Sink {
-117private AtomicLong readFailureCount = 
new AtomicLong(0),
-118writeFailureCount = new 
AtomicLong(0);
-119
-120@Override
-121public long getReadFailureCount() {
-122  return readFailureCount.get();
-123}
-124
-125@Override
-126public long incReadFailureCount() {
-127  return 
readFailureCount.incrementAndGet();
-128}
-129
-130@Override
-131public void 
publishReadFailure(HRegionInfo region, Exception e) {
-132  
readFailureCount.incrementAndGet();
-133  LOG.error(String.format("read from 
region %s failed", region.g
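For orientation: the Canary source above defines a pluggable Sink, with StdOutSink as the stock implementation. A small customization sketch that extends the StdOutSink shown in the listing; the alert hook is a placeholder, everything else uses only methods visible above.

public class AlertingSink extends Canary.StdOutSink {
  @Override
  public void publishReadFailure(HRegionInfo region, Exception e) {
    super.publishReadFailure(region, e);  // keep the failure count + log line
    alert("canary read failed in region " + region.getRegionNameAsString());
  }

  private void alert(String message) {
    // Placeholder: wire to the paging/alerting system of choice.
    System.err.println("ALERT: " + message);
  }
}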

[10/51] [partial] hbase-site git commit: Published site at 7f39baf0f4572ff209837d7de5d37554851ecbb7.

2016-03-29 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fda4017d/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
index 6bf8957..a8cafeb 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -2887,7 +2887,7 @@ implements 
 
 closeLock
-private final http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object closeLock
+private final http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object closeLock
 
 
 
@@ -2896,7 +2896,7 @@ implements 
 
 MEMSTORE_PERIODIC_FLUSH_INTERVAL
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MEMSTORE_PERIODIC_FLUSH_INTERVAL
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MEMSTORE_PERIODIC_FLUSH_INTERVAL
 Conf key for the periodic flush interval
 See Also:Constant
 Field Values
 
@@ -2907,7 +2907,7 @@ implements 
 
 DEFAULT_CACHE_FLUSH_INTERVAL
-public static final int DEFAULT_CACHE_FLUSH_INTERVAL
+public static final int DEFAULT_CACHE_FLUSH_INTERVAL
 Default interval for the memstore flush
 See Also:Constant
 Field Values
 
@@ -2918,7 +2918,7 @@ implements 
 
 SYSTEM_CACHE_FLUSH_INTERVAL
-public static final int SYSTEM_CACHE_FLUSH_INTERVAL
+public static final int SYSTEM_CACHE_FLUSH_INTERVAL
 Default interval for System tables memstore flush
 See Also:Constant
 Field Values
 
@@ -2929,7 +2929,7 @@ implements 
 
 MEMSTORE_FLUSH_PER_CHANGES
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MEMSTORE_FLUSH_PER_CHANGES
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MEMSTORE_FLUSH_PER_CHANGES
 Conf key to force a flush if there are already enough 
changes for one region in memstore
 See Also:Constant
 Field Values
 
@@ -2940,7 +2940,7 @@ implements 
 
 DEFAULT_FLUSH_PER_CHANGES
-public static final long DEFAULT_FLUSH_PER_CHANGES
+public static final long DEFAULT_FLUSH_PER_CHANGES
 See Also:Constant
 Field Values
 
 
@@ -2950,7 +2950,7 @@ implements 
 
 MAX_FLUSH_PER_CHANGES
-public static final long MAX_FLUSH_PER_CHANGES
+public static final long MAX_FLUSH_PER_CHANGES
 The following MAX_FLUSH_PER_CHANGES is large enough because 
each KeyValue has 20+ bytes
  overhead. Therefore, even 1G empty KVs occupy at least 20GB memstore size for 
a single region
 See Also:Constant
 Field Values
@@ -2962,7 +2962,7 @@ implements 
 
 FOR_UNIT_TESTS_ONLY
-private static final byte[] FOR_UNIT_TESTS_ONLY
+private static final byte[] FOR_UNIT_TESTS_ONLY
 Row needed by below method.
 
 
@@ -2972,7 +2972,7 @@ implements 
 
 FIXED_OVERHEAD
-public static final long FIXED_OVERHEAD
+public static final long FIXED_OVERHEAD
 
 
 
@@ -2981,7 +2981,7 @@ implements 
 
 DEEP_OVERHEAD
-public static final long DEEP_OVERHEAD
+public static final long DEEP_OVERHEAD
 
 
 
@@ -2990,7 +2990,7 @@ implements 
 
 MOCKED_LIST
-private static final http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List MOCKED_LIST
+private static final http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List MOCKED_LIST
 A mocked list implementation - discards all updates.
 
 
@@ -3530,7 +3530,7 @@ public long 
 
 isRecovering
-public boolean isRecovering()
+public boolean isRecovering()
 
 Specified by:
 isRecovering in
 interface Region
@@ -3543,7 +3543,7 @@ public long 
 
 isAvailable
-public boolean isAvailable()
+public boolean isAvailable()
 
 Specified by:
 isAvailable in
 interface Region
@@ -3556,7 +3556,7 @@ public long 
 
 isSplittable
-public boolean isSplittable()
+public boolean isSplittable()
 Returns:true if region is 
splittable
 
 
@@ -3566,7 +3566,7 @@ public long 
 
 isMergeable
-public boolean isMergeable()
+public boolean isMergeable()
 Returns:true if region is 
mergeable
 
 
@@ -3576,7 +3576,7 @@ public long 
 
 areWritesEnabled
-public boolean areWritesEnabled()
+public boolean areWritesEnabled()
 
 
 
@@ -3585,7 +3585,7 @@ public long 
 
 getMVCC
-public MultiVersionConcurrencyControl getMVCC()
+public MultiVersionConcurrencyControl getMVCC()
 
 
 
@@ -3594,7 +3594,7 @@ public long 
 
 getMaxFlushedSeqId
-public long getMaxFlushedSeqId()
+public long getMaxFlushedSeqId()
 
 Specified by:
 getMaxFlushedSeqId in
 interf
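For orientation: the HRegion constants above document two flush triggers, a periodic interval (MEMSTORE_PERIODIC_FLUSH_INTERVAL) and a change-count bound (MEMSTORE_FLUSH_PER_CHANGES, capped by MAX_FLUSH_PER_CHANGES because each KeyValue carries 20+ bytes of overhead). A hedged configuration sketch; the key strings and values below are this editor's recollection of those constants, not quoted from the page.

Configuration conf = HBaseConfiguration.create();
// MEMSTORE_PERIODIC_FLUSH_INTERVAL: flush a region's memstore at least
// this often (milliseconds); here, hourly.
conf.setInt("hbase.regionserver.optionalcacheflushinterval", 3600000);
// MEMSTORE_FLUSH_PER_CHANGES: force a flush once this many changes
// accumulate in one region, regardless of memstore size.
conf.setLong("hbase.regionserver.flush.per.changes", 30000000L);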

[10/51] [partial] hbase-site git commit: Published site at 52fd70500e0a00e273e2ec0c09d7c914b89432ce.

2016-03-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f30982bd/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestTimestampFilterSeekHint.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestTimestampFilterSeekHint.html
 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestTimestampFilterSeekHint.html
new file mode 100644
index 000..5ea8d1c
--- /dev/null
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestTimestampFilterSeekHint.html
@@ -0,0 +1,115 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+Uses of Class 
org.apache.hadoop.hbase.regionserver.TestTimestampFilterSeekHint (Apache HBase 
2.0.0-SNAPSHOT Test API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+
+Uses of 
Classorg.apache.hadoop.hbase.regionserver.TestTimestampFilterSeekHint
+
+No usage of 
org.apache.hadoop.hbase.regionserver.TestTimestampFilterSeekHint
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f30982bd/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
index c9cf55d..c4d46e2 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
@@ -190,6 +190,7 @@
 TestStoreFileRefresherChore.StaleStorefileRefresherChore
 TestStoreFileScannerWithTagCompression
 TestStoreScanner
+TestStoreScanner.CellGridStoreScanner
 TestStripeCompactor
 TestStripeCompactor.Scanner
 TestStripeCompactor.StoreFileWritersCapture
@@ -200,6 +201,7 @@
 TestTags
 TestTags.TestCoprocessorForTags
 TestTimeRangeTracker
+TestTimestampFilterSeekHint
 TestWALLockup
 TestWideScanner
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f30982bd/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
index c9adcdf..d395f03 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
@@ -998,47 +998,57 @@
  
 
 
+TestStoreScanner.CellGridStoreScanner
+
+A StoreScanner for our CELL_GRID above.
+
+
+
 TestStripeCompactor
  
 
-
+
 TestStripeCompactor.Scanner
  
 
-
+
 TestStripeCompactor.StoreFileWritersCapture
  
 
-
+
 TestStripeCompactor.StoreFileWritersCapture.Writer
  
 
-
+
 TestStripeStoreEngine
  
 
-
+
 TestStripeStoreEngine.TestStoreEngine
  
 
-
+
 TestStripeStoreFileManager
  
 
-
+
 TestTags
 
 Class that test tags
 
 
-
+
 TestTags.TestCoprocessorForTags
  
 
-
+
 TestTimeRangeTracker
  
 
+
+TestTimestampFilterSeekHint
+ 
+
 
 TestWALLockup
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f30982bd/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 7036c7f..cd6fab5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -276,6 +276,11 @@
 org.apache.hadoop.hbase.regionserver.TestScannerHeartbeatMessages.HeartbeatKVHeap
 
 
+org.apache.hadoop.hbase.regionserver.StoreScanner 
(implements org.apache.hadoop.hbase.regionserver.ChangedReadersObserver, 
org.apache.hadoop.hbase.region
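For orientation: among the additions indexed above is TestTimestampFilterSeekHint, which exercises the seek hints a timestamp filter hands the store scanner. A minimal client-side sketch of the filter under test; the timestamps are illustrative.

// Restrict a scan to cells at exactly these timestamps; the filter can
// hint the scanner to seek past irrelevant versions.
Scan scan = new Scan();
scan.setFilter(new TimestampsFilter(java.util.Arrays.asList(5L, 10L)));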

[10/51] [partial] hbase-site git commit: Published site at cadfb21f4bb465d1e305db2a159b8574282c8150.

2016-03-23 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/123539c5/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
index edb4b3e..17b8675 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
@@ -147,219 +147,223 @@ implements 
 private long
-avgStoreFileAge 
+averageRegionSize 
 
 
+private long
+avgStoreFileAge 
+
+
 private BlockCache
 blockCache 
 
-
+
 private long
 blockedRequestsCount 
 
-
+
 private CacheStats
 cacheStats 
 
-
+
 private long
 cellsCountCompactedFromMob 
 
-
+
 private long
 cellsCountCompactedToMob 
 
-
+
 private long
 cellsSizeCompactedFromMob 
 
-
+
 private long
 cellsSizeCompactedToMob 
 
-
+
 private long
 checkAndMutateChecksFailed 
 
-
+
 private long
 checkAndMutateChecksPassed 
 
-
+
 private long
 compactedCellsCount 
 
-
+
 private long
 compactedCellsSize 
 
-
+
 private long
 dataInMemoryWithoutWAL 
 
-
+
 private 
org.apache.hadoop.hdfs.DFSHedgedReadMetrics
 dfsHedgedReadMetrics
 Can be null if not on hdfs.
 
 
-
+
 private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ScheduledExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ScheduledExecutorService
 executor 
 
-
+
 private long
 filteredReadRequestsCount 
 
-
+
 private long
 flushedCellsCount 
 
-
+
 private long
 flushedCellsSize 
 
-
+
 private static 
org.apache.commons.logging.Log
 LOG 
 
-
+
 private long
 majorCompactedCellsCount 
 
-
+
 private long
 majorCompactedCellsSize 
 
-
+
 private long
 maxStoreFileAge 
 
-
+
 private long
 memstoreSize 
 
-
+
 private MetricsWALSource
 metricsWALSource 
 
-
+
 private long
 minStoreFileAge 
 
-
+
 private MobFileCache
 mobFileCache 
 
-
+
 private long
 mobFileCacheAccessCount 
 
-
+
 private long
 mobFileCacheCount 
 
-
+
 private long
 mobFileCacheEvictedCount 
 
-
+
 private double
 mobFileCacheHitRatio 
 
-
+
 private long
 mobFileCacheMissCount 
 
-
+
 private long
 mobFlushCount 
 
-
+
 private long
 mobFlushedCellsCount 
 
-
+
 private long
 mobFlushedCellsSize 
 
-
+
 private long
 mobScanCellsCount 
 
-
+
 private long
 mobScanCellsSize 
 
-
+
 private long
 numMutationsWithoutWAL 
 
-
+
 private long
 numReferenceFiles 
 
-
+
 private long
 numStoreFiles 
 
-
+
 private long
 numStores 
 
-
+
 private long
 numWALFiles 
 
-
+
 private double
 percentFileLocal 
 
-
+
 private double
 percentFileLocalSecondaryRegions 
 
-
+
 private long
 period 
 
-
+
 private long
 readRequestsCount 
 
-
+
 private HRegionServer
 regionServer 
 
-
+
 private double
 requestsPerSecond 
 
-
+
 private http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
 runnable 
 
-
+
 private long
 storefileIndexSize 
 
-
+
 private long
 storeFileSize 
 
-
+
 private long
 totalStaticBloomSize 
 
-
+
 private long
 totalStaticIndexSize 
 
-
+
 private long
 walFileSize 
 
-
+
 private long
 writeRequestsCount 
 
@@ -403,451 +407,457 @@ implements 
 long
-getAvgStoreFileAge() 
+getAverageRegionSize()
+Get the average region size for this region server.
+
 
 
 long
+getAvgStoreFileAge() 
+
+
+long
 getBlockCacheCount()
 Get the number of items in the block cache.
 
 
-
+
 long
 getBlockCacheEvictedCount()
 Get the number of items evicted from the block cache.
 
 
-
+
 long
 getBlockCacheFailedInsertions()
 Number of cache insertions that failed.
 
 
-
+
 long
 getBlockCacheFreeSize()
 Get the size (in bytes) of the block cache that is 
free.
 
 
-
+
 double
 getBlockCacheHitCachingPercent()
 Get the percent of requests with the block cache turned on 
that hit the block cache.
 
 
-
+
 long
 getBlockCacheHitCount()
 Get the count of hits to the block cache
 
 
-
+
 double
 getBlockCacheHitPercent()
 Get the percent of all requests that hit the block 
cache.
 
 
-
+
 long
 getBlockCacheMissCount()
 Get the count of misses to the block cache.
 
 
-
+
 long
 getBlockCachePrimaryEvictedCount()
 Get the number of items evicted from primary replica in the 
block cache.
 
 
-
+
 long
 getBlockCachePrimaryHitCount()
 Get the count of hits to primary replica in the block 
cache
 
 
-
+
 long
 getBlockCachePrimaryMissCount()
 Get the count of misses to primary replica in the block 
cache.
 
 
-
+
 long
 getBlockCacheSize()
 Get the total size (in bytes) of the block cache.
 
 
-
+
 long
 getBlockedRequestsCount() 
 
-
+
 long
 getCellsCountCompactedFromMob()
 Gets the number of cells moved from mob during 
compaction.
 
 
-
+
 long
 getCellsCountCompactedToMob()
 Gets the number of cells moved to mob during 
compaction.
 
 
-
+
 long
 getCell
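For orientation: the getter list above distinguishes getBlockCacheHitPercent (all requests) from getBlockCacheHitCachingPercent (only requests issued with cache-on-read enabled). A one-line illustration of the underlying ratio, derived from the hit/miss counters also listed here; the variable names are assumptions.

// Percent of all requests served from the block cache; the 'caching'
// variant applies the same formula over cache-on-read requests only.
double hitPercent = 100.0 * hitCount / (hitCount + missCount);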

[10/51] [partial] hbase-site git commit: Published site at a2c99b133f8ad2a02e8710a4e0f9a448539cc127.

2016-03-19 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2264aeb1/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-summary.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-summary.html
 
b/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-summary.html
index 159781d..dcc26f0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-summary.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-summary.html
@@ -36,7 +36,7 @@
 
 
 Prev
 Package
-Next
 Package
+Next
 Package
 
 
 Frames
@@ -111,7 +111,7 @@
 
 
 Prev
 Package
-Next
 Package
+Next
 Package
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2264aeb1/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-tree.html
index 3c0c877..64d59f6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-tree.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/rest/provider/producer/package-tree.html
@@ -36,7 +36,7 @@
 
 
 Prev
-Next
+Next
 
 
 Frames
@@ -99,7 +99,7 @@
 
 
 Prev
-Next
+Next
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2264aeb1/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupAdmin.html
new file mode 100644
index 000..895160f
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupAdmin.html
@@ -0,0 +1,471 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+RSGroupAdmin (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev 
Class
+Next 
Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.rsgroup
+Class RSGroupAdmin
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.rsgroup.RSGroupAdmin
+
+
+
+
+
+
+
+All Implemented Interfaces:
+http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable, http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true";
 title="class or interface in java.lang">AutoCloseable
+
+
+Direct Known Subclasses:
+RSGroupAdminClient, RSGroupAdminServer
+
+
+
+@InterfaceAudience.Private
+public abstract class RSGroupAdmin
+extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
+implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
+Group user API interface used between client and 
server.
+
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+RSGroupAdmin() 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods 
+
+Modifier and Type
+Method and Description
+
+
+abstract void
+addRSGroup(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+Add a new group
+
+
+
+abstract boolean
+balanceRSGroup(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+Balance the regions in a group
+
+
+
+abstract RSGroupInfo
+getRSGroupInfo(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String groupName)
+Gets the regionserver group information.
+
+
+
+abstract RSGroupInfo
+getRSGroupInfoOfTable(TableName tableName)
+Gets the regionserver group info of table.
+
+
+
+abstract RSGroupInfo
+getRSGroupOfServer(com.google.common.net.HostAndPort hostPort)
+Retrieve the RSGroupInfo a server is affiliated to
+
+
+
+abstract http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
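For orientation: the RSGroupAdmin page above lists the group-admin surface (addRSGroup, balanceRSGroup, getRSGroupInfo, getRSGroupInfoOfTable, getRSGroupOfServer, ...). A hedged usage sketch limited to the methods shown; rsGroupAdmin stands for a concrete client such as the RSGroupAdminClient subclass named above, and the group name is illustrative.

// Create a group, inspect it, and ask the balancer to act on it.
rsGroupAdmin.addRSGroup("analytics");
RSGroupInfo info = rsGroupAdmin.getRSGroupInfo("analytics");
boolean ranBalancer = rsGroupAdmin.balanceRSGroup("analytics");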

[10/51] [partial] hbase-site git commit: Published site at eea8b38dfa0180d3e6f93d3e8055d5d4fbf673c3.

2016-03-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b785cb2/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
index a529fdf..cf8fb3f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
@@ -140,2514 +140,2515 @@
 132  // SizeOf which uses 
java.lang.instrument says 24 bytes. (3 longs?)
 133  public static final int 
ESTIMATED_HEAP_TAX = 16;
 134
-135
-136  /**
-137   * Returns length of the byte array, 
returning 0 if the array is null.
-138   * Useful for calculating sizes.
-139   * @param b byte array, which can be 
null
-140   * @return 0 if b is null, otherwise 
returns length
-141   */
-142  final public static int len(byte[] b) 
{
-143return b == null ? 0 : b.length;
-144  }
-145
-146  private byte[] bytes;
-147  private int offset;
-148  private int length;
-149
-150  /**
-151   * Create a zero-size sequence.
-152   */
-153  public Bytes() {
-154super();
-155  }
-156
-157  /**
-158   * Create a Bytes using the byte array 
as the initial value.
-159   * @param bytes This array becomes the 
backing storage for the object.
-160   */
-161  public Bytes(byte[] bytes) {
-162this(bytes, 0, bytes.length);
-163  }
-164
-165  /**
-166   * Set the new Bytes to the contents of 
the passed
-167   * ibw.
-168   * @param ibw the value to set this 
Bytes to.
-169   */
-170  public Bytes(final Bytes ibw) {
-171this(ibw.get(), ibw.getOffset(), 
ibw.getLength());
-172  }
-173
-174  /**
-175   * Set the value to a given byte 
range
-176   * @param bytes the new byte range to 
set to
-177   * @param offset the offset in newData 
to start at
-178   * @param length the number of bytes in 
the range
-179   */
-180  public Bytes(final byte[] bytes, final 
int offset,
-181  final int length) {
-182this.bytes = bytes;
-183this.offset = offset;
-184this.length = length;
-185  }
-186
-187  /**
-188   * Copy bytes from ByteString 
instance.
-189   * @param byteString copy from
-190   */
-191  public Bytes(final ByteString 
byteString) {
-192this(byteString.toByteArray());
-193  }
-194
-195  /**
-196   * Get the data from the Bytes.
-197   * @return The data is only valid 
between offset and offset+length.
-198   */
-199  public byte [] get() {
-200if (this.bytes == null) {
-201  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-202  "called w/o accompaying 
readFields invocation");
-203}
-204return this.bytes;
-205  }
-206
-207  /**
-208   * @param b Use passed bytes as backing 
array for this instance.
-209   */
-210  public void set(final byte [] b) {
-211set(b, 0, b.length);
-212  }
-213
-214  /**
-215   * @param b Use passed bytes as backing 
array for this instance.
-216   * @param offset
-217   * @param length
-218   */
-219  public void set(final byte [] b, final 
int offset, final int length) {
-220this.bytes = b;
-221this.offset = offset;
-222this.length = length;
-223  }
-224
-225  /**
-226   * @return the number of valid bytes in 
the buffer
-227   * @deprecated use {@link #getLength()} 
instead
-228   */
-229  @Deprecated
-230  public int getSize() {
-231if (this.bytes == null) {
-232  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-233  "called w/o accompaying 
readFields invocation");
-234}
-235return this.length;
-236  }
-237
-238  /**
-239   * @return the number of valid bytes in 
the buffer
-240   */
-241  public int getLength() {
-242if (this.bytes == null) {
-243  throw new 
IllegalStateException("Uninitialiized. Null constructor " +
-244  "called w/o accompaying 
readFields invocation");
-245}
-246return this.length;
-247  }
-248
-249  /**
-250   * @return offset
-251   */
-252  public int getOffset(){
-253return this.offset;
-254  }
-255
-256  public ByteString toByteString() {
-257return 
ByteString.copyFrom(this.bytes, this.offset, this.length);
-258  }
-259
-260  @Override
-261  public int hashCode() {
-262return Bytes.hashCode(bytes, offset, 
length);
-263  }
-264
-265  /**
-266   * Define the sort order of the 
Bytes.
-267   * @param that The other bytes 
writable
-268   * @return Positive if left is bigger 
than right, 0 if they are equal, and
-269   * negative if left is smaller 
than right.
-270   */
-271  public int compareTo(Bytes that) {
-272return BYTES_RAWCOMPARATOR.compare(
-273this.bytes, this.offset, 
this.length,
-274that.bytes, that.offset, 
that.length);
-275  }
-276
-277  /**
-278   * Compares the bytes in this object to 
the specified byte array
-279   * @param that
-280   * @return Positive if left is bigger 
than right, 
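Taken together, the constructors and set() above make Bytes a zero-copy view over a caller-owned array. A minimal usage sketch, assuming hbase-common (for org.apache.hadoop.hbase.util.Bytes) on the classpath; note the wrapper deliberately does not copy, so mutating the backing array changes the view:

import org.apache.hadoop.hbase.util.Bytes;

public class BytesViewDemo {
  public static void main(String[] args) {
    byte[] backing = Bytes.toBytes("rowkey-0001");

    // Wrap the whole array, then narrow to the numeric suffix; no copies are made.
    Bytes whole  = new Bytes(backing);
    Bytes suffix = new Bytes(backing, 7, 4);           // view over "0001"

    System.out.println(suffix.getOffset() + "/" + suffix.getLength()); // 7/4
    System.out.println(whole.compareTo(suffix));       // ordering per BYTES_RAWCOMPARATOR

    backing[10] = '2';                                 // mutates both views
    System.out.println(Bytes.toString(suffix.get(), suffix.getOffset(), suffix.getLength())); // 0002
  }
}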

[10/51] [partial] hbase-site git commit: Published site at f6945c4631e7697976fd8c2272f8152905c6f875.

2016-03-10 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5eb82203/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.TestStateMachineProcedure.State.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.TestStateMachineProcedure.State.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.TestStateMachineProcedure.State.html
index 79aa3db..3cbc9f0 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.TestStateMachineProcedure.State.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.TestStateMachineProcedure.State.html
@@ -27,264 +27,368 @@
[hunk re-flowed: HTML line numbers and mid-token wraps removed; generic type parameters
eaten by the HTML filter are not restored]

package org.apache.hadoop.hbase.procedure2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

@Category({MasterTests.class, SmallTests.class})
public class TestYieldProcedures {
  private static final Log LOG = LogFactory.getLog(TestYieldProcedures.class);

  private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
  private static final Procedure NULL_PROC = null;

  private ProcedureExecutor procExecutor;
  private ProcedureStore procStore;

  private HBaseCommonTestingUtility htu;
  private FileSystem fs;
  private Path testDir;
  private Path logDir;

  @Before
  public void setUp() throws IOException {
    htu = new HBaseCommonTestingUtility();
    testDir = htu.getDataTestDir();
    fs = testDir.getFileSystem(htu.getConfiguration());
    assertTrue(testDir.depth() > 1);

    logDir = new Path(testDir, "proc-logs");
    procStore = ProcedureTestingUtility.createWalStore(htu.getConfiguration(), fs, logDir);
    procExecutor = new ProcedureExecutor(htu.getConfiguration(), new TestProcEnv(), procStore);
    procStore.start(PROCEDURE_EXECUTOR_SLOTS);
    procExecutor.start(PROCEDURE_EXECUTOR_SLOTS, true);
  }

  @After
  public void tearDown() throws IOException {
    procExecutor.stop();
    procStore.stop(false);
    fs.delete(logDir, true);
  }

  @Test
  public void testYieldEachExecutionStep() throws Exception {
    final int NUM_STATES = 3;

    TestStateMachineProcedure[] procs = new TestStateMachineProcedure[3];
    for (int i = 0; i < procs.length; ++i) {
      procs[i] = new TestStateMachineProcedure(true, false);
      procExecutor.submitProcedure(procs[i]);
    }
    ProcedureTestingUtility.waitNoProcedureRunning(procExecutor);

    // verify yield during execute()
    long prevTimestamp = 0;
    for (int execStep = 0; execStep < NUM_STATES; ++execStep) {
      for (int i = 0; i < procs.length; ++i) {
        assertEquals(NUM_STATES * 2, procs[i].getExecutionInfo().size());
        TestStateMachineProcedure.ExecutionInfo info = procs[i].getExecutionInfo().get(execStep);
        LOG.info("i=" + i + " execStep=" + execStep + " timestamp=" + info.getTimestamp());
        assertEquals(false, info.isRollback());
        assertEquals(execStep, info.getStep().ordinal());
        assertEquals(prevTimestamp + 1, info.getTimestamp());
        prevTimestamp++;
      }
    }

    // verify yield during rollback()
    int count = NUM_STATES;
    for (int execStep = NUM_STATES - 1; execStep >= 0; --execStep) {
      for (int i = 0; i < procs.length; ++i) {
        assertEquals(NUM_STATES * 2, procs[i].getExecutionInfo().size());
        TestStateMachineProcedure.ExecutionInfo info = procs[i].getExecutionInfo().get(count);
        LOG.info("i=" + i + " execStep=" + execStep + " timestamp=" + info.getTimestamp());
        assertEquals(true, info.isRollback());
        assertEquals(execStep, info.getStep().ordinal());
        assertEquals(prevTimestamp + 1, info.getTimestamp());
        prevTimestamp++;
      }
      count++;
    }
  }

  @Test
  publi
[listing truncated in the archive]
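The two verification loops encode a single invariant: because every step yields, the executor round-robins the submitted procedures, so a global timestamp counter advances by exactly one per procedure per step. A stripped-down sketch of that invariant check, with hypothetical types standing in for the test's ExecutionInfo:

import java.util.List;

final class YieldOrderCheck {
  static final class Step {
    final long timestamp;
    Step(long timestamp) { this.timestamp = timestamp; }
  }

  /** Asserts the round-robin, one-tick-per-yield ordering the test above relies on. */
  static void assertRoundRobin(List<List<Step>> perProcedureSteps, int numStates) {
    long expected = 1; // timestamps come from a global counter starting at 1
    for (int step = 0; step < numStates; ++step) {
      for (List<Step> proc : perProcedureSteps) {
        if (proc.get(step).timestamp != expected++) {
          throw new AssertionError("executor did not yield between steps");
        }
      }
    }
  }
}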

[10/51] [partial] hbase-site git commit: Published site at 05161fcbfdd78f5684b9cb52c49a02be5ad14499.

2016-03-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2211f347/devapidocs/src-html/org/apache/hadoop/hbase/client/MetricsConnection.RunnerStats.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/MetricsConnection.RunnerStats.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/MetricsConnection.RunnerStats.html
index b1686e3..8533e9d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/MetricsConnection.RunnerStats.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/MetricsConnection.RunnerStats.html
@@ -60,7 +60,7 @@
[hunks re-flowed: HTML line numbers and mid-token wraps removed; generic type parameters
eaten by the HTML filter are not restored]
  * {@link #shutdown()} to terminate the thread pools they allocate.
  */
 @InterfaceAudience.Private
-public class MetricsConnection {
+public class MetricsConnection implements StatisticTrackable {

   /** Set this key to {@code true} to enable metrics collection of client requests. */
   public static final String CLIENT_SIDE_METRICS_ENABLED_KEY = "hbase.client.metrics.enable";

@@ -207,238 +207,244 @@
     }
     Result result = (Result) r;
     ClientProtos.RegionLoadStats stats = result.getStats();
-    if(stats == null){
+    if (stats == null) {
       return;
     }
-    String name = serverName.getServerName() + "," + Bytes.toStringBinary(regionName);
-    ConcurrentMap rsStats = null;
-    if (serverStats.containsKey(serverName)) {
-      rsStats = serverStats.get(serverName);
-    } else {
-      rsStats = serverStats.putIfAbsent(serverName,
-          new ConcurrentSkipListMap(Bytes.BYTES_COMPARATOR));
-      if (rsStats == null) {
-        rsStats = serverStats.get(serverName);
-      }
-    }
-    RegionStats regionStats = null;
-    if (rsStats.containsKey(regionName)) {
-      regionStats = rsStats.get(regionName);
-    } else {
-      regionStats = rsStats.putIfAbsent(regionName, new RegionStats(this.registry, name));
-      if (regionStats == null) {
-        regionStats = rsStats.get(regionName);
-      }
-    }
-    regionStats.update(stats);
-  }
-
-  /** A lambda for dispatching to the appropriate metric factory method */
-  private static interface NewMetric {
-    T newMetric(Class clazz, String name, String scope);
+    updateRegionStats(serverName, regionName, stats);
+  }
+
+  @Override
+  public void updateRegionStats(ServerName serverName, byte[] regionName,
+      ClientProtos.RegionLoadStats stats) {
+    String name = serverName.getServerName() + "," + Bytes.toStringBinary(regionName);
+    ConcurrentMap rsStats = null;
+    if (serverStats.containsKey(serverName)) {
+      rsStats = serverStats.get(serverName);
+    } else {
+      rsStats = serverStats.putIfAbsent(serverName,
+          new ConcurrentSkipListMap(Bytes.BYTES_COMPARATOR));
+      if (rsStats == null) {
+        rsStats = serverStats.get(serverName);
+      }
+    }
+    RegionStats regionStats = null;
+    if (rsStats.containsKey(regionName)) {
+      regionStats = rsStats.get(regionName);
+    } else {
+      regionStats = rsStats.putIfAbsent(regionName, new RegionStats(this.registry, name));
+      if (regionStats == null) {
+        regionStats = rsStats.get(regionName);
+      }
+    }
+    regionStats.update(stats);
   }

-  /** Anticipated number of metric entries */
-  private static final int CAPACITY = 50;
-  /** Default load factor from {@link java.util.HashMap#DEFAULT_LOAD_FACTOR} */
-  private static final float LOAD_FACTOR = 0.75f;
-  /**
-   * Anticipated number of concurrent accessor threads, from
-   * {@link ConnectionImplementation#getBatchPool()}
-   */
-  private static final int CONCURRENCY_LEVEL = 256;
-
-  private final MetricRegistry registry;
-  private final JmxReporter reporter;
-  private final String scope;
-
-  private final NewMetric timerFactory = new NewMetric() {
-    @Override public Timer newMetric(Class clazz, String name, String scope) {
-      return registry.timer(name(clazz, name, scope));
-    }
-  };
+  /** A lambda for dispatching to the appropriate metric factory method */
+  private static interface NewMetric {
+    T newMetric(Class clazz, String name, String scope);
+  }
+
+  /** Anticipated number of metric entries */
+  private static final int CAPACITY = 50;
+  /** Default load factor from {@link java.util.HashMap#DEFAULT_LOAD_FACTOR} */
+  private static final float LOAD_FACTOR = 0.75f;
+  /**
+   * Anticipated number of concurrent accessor threads, from
+   * {@link ConnectionImplementation#getBatchPool()}
+   */
+  private static final int CONCUR
[diff truncated in the archive]
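The get-or-create dance repeated twice above (containsKey, then putIfAbsent, then re-get when another thread won the race) is the standard pre-Java-8 idiom for nested concurrent maps. A self-contained sketch of the same idiom, with computeIfAbsent shown as the modern one-liner it collapses into:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.LongAdder;

public class GetOrCreateDemo {
  private final ConcurrentMap<String, LongAdder> stats = new ConcurrentHashMap<>();

  /** The idiom used in updateRegionStats(): never clobber a concurrent insert. */
  LongAdder classic(String key) {
    LongAdder counter = stats.get(key);
    if (counter == null) {
      counter = stats.putIfAbsent(key, new LongAdder()); // returns prior value, or null if we won
      if (counter == null) {
        counter = stats.get(key); // we won the race; fetch what we just inserted
      }
    }
    return counter;
  }

  /** Equivalent and race-safe since Java 8. */
  LongAdder modern(String key) {
    return stats.computeIfAbsent(key, k -> new LongAdder());
  }

  public static void main(String[] args) {
    GetOrCreateDemo d = new GetOrCreateDemo();
    d.classic("region-a").increment();
    d.modern("region-a").increment();
    System.out.println(d.stats.get("region-a").sum()); // 2
  }
}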

[10/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

2016-03-04 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
index 8fd15a0..da22771 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
@@ -186,741 +186,742 @@
[hunk re-flowed: HTML line numbers and mid-token wraps removed; the visible portion is
the removed side of a one-line shift in the generated HFile listing]

  /** The number of bytes per checksum. */
  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;
  // For measuring number of checksum failures
  static final Counter checksumFailures = new Counter();

  // for test purpose
  public static final Counter dataBlockReadCnt = new Counter();

  /**
   * Number of checksum verification failures. It also
   * clears the counter.
   */
  public static final long getChecksumFailuresCount() {
    long count = checksumFailures.get();
    checksumFailures.set(0);
    return count;
  }

  /** API required to write an {@link HFile} */
  public interface Writer extends Closeable {
    /** Max memstore (mvcc) timestamp in FileInfo */
    public static final byte[] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");

    /** Add an element to the file info map. */
    void appendFileInfo(byte[] key, byte[] value) throws IOException;

    void append(Cell cell) throws IOException;

    /** @return the path to this {@link HFile} */
    Path getPath();

    /**
     * Adds an inline block writer such as a multi-level block index writer or
     * a compound Bloom filter writer.
     */
    void addInlineBlockWriter(InlineBlockWriter bloomWriter);

    // The below three methods take Writables.  We'd like to undo Writables but undoing the below
    // would be pretty painful.  Could take a byte [] or a Message but we want to be backward
    // compatible around hfiles so would need to map between Message and Writable or byte [] and
    // current Writable serialization.  This would be a bit of work to little gain.  That's my
    // thinking at moment.  St.Ack 20121129

    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);

    /**
     * Store general Bloom filter in the file. This does not deal with Bloom filter
     * internals but is necessary, since Bloom filters are stored differently
     * in HFile version 1 and version 2.
     */
    void addGeneralBloomFilter(BloomFilterWriter bfw);

    /**
     * Store delete family Bloom filter in the file, which is only supported in
     * HFile V2.
     */
    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;

    /** Return the file context for the HFile this writer belongs to */
    HFileContext getFileContext();
  }

  /**
   * This variety of ways to construct writers is used throughout the code, and
   * we want to be able to swap writer implementations.
   */
  public static class WriterFactory {
    protected final Configuration conf;
    protected final CacheConfig cacheConf;
    protected FileSystem fs;
    protected Path path;
    protected FSDataOutputStream ostream;
    protected CellComparator comparator = CellComparator.COMPARATOR;
    protected InetSocketAddress[] favoredNodes;
    private HFileContext fileContext;
    protected boolean shouldDropBehind = false;

    WriterFactory(Configuration conf, CacheConfig cacheConf) {
      this.conf = conf;
      this.cacheConf = cacheConf;
    }

    public WriterFactory withPath(FileSystem fs, Path path) {
      Preconditions.checkNotNull(fs);
      Preconditions.checkNotNull(path);
      this.fs = fs;
      this.path = path;
      return this;
    }

    public WriterFactory withOutputStream(FSDataOutputStream ostream) {
      Preconditions.checkNotNull(ostream);
      this.ostream = ostream;
      return this;
    }

    public WriterFactory withComparator(CellComparator comparator) {
      Preconditions.checkNotNull(comparator);
      this.comparator = comparator;
      return this;
    }

    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {
      // Deliberately not checking for null here.
      this.favoredNodes = favoredNodes;
      return this;
    }

    public WriterFactory withFileContext(HFileContext fileContext) {
      this.fileContext = fileCo
[listing truncated in the archive]
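WriterFactory is a classic mutable fluent builder: required dependencies go through the constructor, optional ones through chainable with*() setters that validate eagerly. A minimal self-contained sketch of that shape; all names here are hypothetical, not the HBase API:

// Minimal fluent-builder sketch mirroring WriterFactory's shape; names are hypothetical.
final class SinkFactory {
  private final String name;        // required, constructor-injected
  private String path;              // optional, validated at the terminal operation
  private int bufferBytes = 64 * 1024;

  SinkFactory(String name) { this.name = name; }

  SinkFactory withPath(String path) {
    if (path == null) throw new NullPointerException("path");
    this.path = path;
    return this;                    // returning 'this' is what makes the calls chainable
  }

  SinkFactory withBufferBytes(int n) { this.bufferBytes = n; return this; }

  String create() {                 // the terminal operation validates accumulated state
    if (path == null) throw new IllegalStateException("path is required");
    return name + " -> " + path + " (" + bufferBytes + "B)";
  }

  public static void main(String[] args) {
    System.out.println(new SinkFactory("hfile-writer").withPath("/tmp/f").create());
  }
}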

[10/51] [partial] hbase-site git commit: Published site at 88f775996b3b52d784ad13ab07515134619316ba.

2016-02-29 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bdd1f3f0/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
index 00013eb..a0f4a2d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
@@ -1750,401 +1750,401 @@
[hunk re-flowed and de-duplicated: lines removed and re-added unchanged are shown once;
the real change is that the anonymous master stub wrapper gains two split/merge RPC
pass-throughs, shifting the code below them]

  // Added:
  @Override
  public MasterProtos.SetSplitOrMergeEnabledResponse setSplitOrMergeEnabled(
      RpcController controller, MasterProtos.SetSplitOrMergeEnabledRequest request)
      throws ServiceException {
    return stub.setSplitOrMergeEnabled(controller, request);
  }

  @Override
  public MasterProtos.IsSplitOrMergeEnabledResponse isSplitOrMergeEnabled(
      RpcController controller, MasterProtos.IsSplitOrMergeEnabledRequest request)
      throws ServiceException {
    return stub.isSplitOrMergeEnabled(controller, request);
  }

  // Unchanged, shifted down:
  @Override
  public IsNormalizerEnabledResponse isNormalizerEnabled(RpcController controller,
      IsNormalizerEnabledRequest request) throws ServiceException {
    return stub.isNormalizerEnabled(controller, request);
  }

  @Override
  public SecurityCapabilitiesResponse getSecurityCapabilities(RpcController controller,
      SecurityCapabilitiesRequest request) throws ServiceException {
    return stub.getSecurityCapabilities(controller, request);
  }
};
  }

  private static void release(MasterServiceState mss) {
    if (mss != null && mss.connection != null) {
      ((ConnectionImplementation) mss.connection).releaseMaster(mss);
    }
  }

  private boolean isKeepAliveMasterConnectedAndRunning(MasterServiceState mss) {
    if (mss.getStub() == null) {
      return false;
    }
    try {
      return mss.isMasterRunning();
    } catch (UndeclaredThrowableException e) {
      // It's somehow messy, but we can receive exceptions such as
      // java.net.ConnectException but they're not declared. So we catch it...
      LOG.info("Master connection is not running anymore", e.getUndeclaredThrowable());
      return false;
    } catch (ServiceException se) {
      LOG.warn("Checking master connection", se);
      return false;
    }
  }

  void releaseMaster(MasterServiceState mss) {
    if (mss.getStub() == null) return;
    synchronized (masterAndZKLock) {
      --mss.userCount;
    }
  }

  private void closeMasterService(MasterServiceState mss) {
    if (mss.getStub() != null) {
      LOG.info("Closing master protocol: " + mss);
      mss.clearStub();
    }
    mss.userCount = 0;
  }

  /**
   * Immediate close of the shared master. Can be by the delayed close or when closing the
   * connection itself.
   */
  private void closeMaster() {
    synchronized (masterAndZKLock) {
      closeMasterService(masterServiceState);
    }
  }
[listing truncated in the archive]
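releaseMaster() and closeMasterService() above implement a hand-rolled reference count guarded by a single lock: users decrement on release, while close tears the stub down and zeroes the count regardless of stragglers. A self-contained sketch of that pattern; the names are hypothetical stand-ins, not the HBase classes:

// Hand-rolled ref-counted service handle, mirroring the releaseMaster/closeMasterService shape.
final class SharedService {
  private final Object lock = new Object();
  private Runnable stub = () -> System.out.println("rpc");
  private int userCount;

  void acquire() { synchronized (lock) { ++userCount; } }

  void release() { synchronized (lock) { --userCount; } }

  /** Immediate close: drops the stub and zeroes the count even if users remain. */
  void close() {
    synchronized (lock) {
      if (stub != null) {
        System.out.println("Closing service, userCount=" + userCount);
        stub = null;
      }
      userCount = 0;
    }
  }

  public static void main(String[] args) {
    SharedService s = new SharedService();
    s.acquire();
    s.release();
    s.close();
  }
}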

[10/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html
new file mode 100644
index 000..5ad1c3b
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html
@@ -0,0 +1,715 @@
[new generated page; navigation chrome, frame links, and scripts omitted. The substance:]

org.apache.hadoop.hbase.util
Class FanOutOneBlockAsyncDFSOutputHelper  (Apache HBase 2.0.0-SNAPSHOT API)

java.lang.Object
  org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper

@InterfaceAudience.Private
public class FanOutOneBlockAsyncDFSOutputHelper extends Object
Helper class for implementing FanOutOneBlockAsyncDFSOutput.

Nested Class Summary
  (package private) static class  FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose
  private static interface        FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor
  private static interface        FanOutOneBlockAsyncDFSOutputHelper.FileCreater
  private static interface        FanOutOneBlockAsyncDFSOutputHelper.LeaseManager
  private static interface        FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
  private static interface        FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter

Field Summary
  private static io.netty.buffer.ByteBufAllocator                          ALLOC
  private static java.lang.reflect.Method                                  CREATE_CHECKSUM
  private static FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor       DFS_CLIENT_ADAPTOR
  private static FanOutOneBlockAsyncDFSOutputHelper.FileCreater            FILE_CREATER
  static long                                                              HEART_BEAT_SEQNO
  private static FanOutOneBlockAsyncDFSOutputHelper.LeaseManager           LEASE_MANAGER
  private static org.apache.commons.logging.Log                            LOG
  private static FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter PIPELINE_ACK_STATUS_GETTER
  private static FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter      STORAGE_TYPE_SETTER

Constructor Summary
  private FanOutOneBlockAsyncDFSOutputHelper()

Method Summary
  (package private) static void beginFileLease(org.apache.hadoop.hdfs.DFSClient client,
      String src, long inodeId)
  (package private) static void completeFile(org.apache.hadoop.hdfs.DFSClient client,
      org.apache.hadoop.hdfs.protocol.ClientProtocol namenode, String src, String clientName,
      org.apache.hadoop.hdfs.protocol.ExtendedBlock block, long fileId)
  private static List<...> connectToDataNodes(org.apache.hadoop.conf.Configuration conf,
[remainder of the method summary truncated in the archive]
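The private interfaces above (FileCreater, LeaseManager, PipelineAckStatusGetter, StorageTypeSetter) paired with cached java.lang.reflect.Method fields are the classic compatibility-shim pattern: resolve a version-dependent method once at class load, then call it through a stable internal interface. A self-contained sketch of that pattern, using String.isBlank() purely as a stand-in for a method that may or may not exist on the runtime:

import java.lang.reflect.Method;

final class BlankCheckShim {
  /** Stable internal interface; callers never touch reflection. */
  private interface BlankChecker { boolean isBlank(String s); }

  private static final BlankChecker CHECKER = createChecker();

  private static BlankChecker createChecker() {
    try {
      // Resolve the version-dependent method once, at class-initialization time.
      Method isBlank = String.class.getMethod("isBlank"); // Java 11+
      return s -> {
        try {
          return (boolean) isBlank.invoke(s);
        } catch (ReflectiveOperationException e) {
          throw new IllegalStateException(e);
        }
      };
    } catch (NoSuchMethodException e) {
      // Fallback for older runtimes.
      return s -> s.trim().isEmpty();
    }
  }

  public static void main(String[] args) {
    System.out.println(CHECKER.isBlank("  ")); // true on any supported runtime
  }
}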

[10/51] [partial] hbase-site git commit: Published site at 58283fa1b1b10beec62cefa40babff6a1424b06c.

2016-02-23 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d02dd5db/devapidocs/src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
index 03578e8..d0873aa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
@@ -300,173 +300,183 @@
[hunk re-flowed: HTML line numbers and mid-token wraps removed; generic type parameters
eaten by the HTML filter are not restored; the visible portion is the removed side of a
shift in the generated SecureBulkLoadEndpoint listing]

            new SecureBulkLoadListener(fs, bulkToken, conf));
      } catch (Exception e) {
        LOG.error("Failed to complete bulk load", e);
      }
      return false;
    }
  });
}
if (region.getCoprocessorHost() != null) {
  try {
    loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded);
  } catch (IOException e) {
    ResponseConverter.setControllerException(controller, e);
    done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
    return;
  }
}
done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(loaded).build());
  }

  private List getBulkLoadObservers() {
    List coprocessorList =
        this.env.getRegion().getCoprocessorHost().findCoprocessors(BulkLoadObserver.class);
    return coprocessorList;
  }

  private Path createStagingDir(Path baseDir, User user, TableName tableName) throws IOException {
    String tblName = tableName.getNameAsString().replace(":", "_");
    String randomDir = user.getShortName() + "__" + tblName + "__" +
        (new BigInteger(RANDOM_WIDTH, random).toString(RANDOM_RADIX));
    return createStagingDir(baseDir, user, randomDir);
  }

  private Path createStagingDir(Path baseDir, User user, String randomDir) throws IOException {
    Path p = new Path(baseDir, randomDir);
    fs.mkdirs(p, PERM_ALL_ACCESS);
    fs.setPermission(p, PERM_ALL_ACCESS);
    return p;
  }

  private User getActiveUser() {
    User user = RpcServer.getRequestUser();
    if (user == null) {
      return null;
    }

    // this is for testing
    if (userProvider.isHadoopSecurityEnabled()
        && "simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) {
      return User.createUserForTesting(conf, user.getShortName(), new String[]{});
    }

    return user;
  }

  @Override
  public Service getService() {
    return this;
  }

  private static class SecureBulkLoadListener implements BulkLoadListener {
    // Target filesystem
    private FileSystem fs;
    private String stagingDir;
    private Configuration conf;
    // Source filesystem
    private FileSystem srcFs = null;
    private Map origPermissions = null;

    public SecureBulkLoadListener(FileSystem fs, String stagingDir, Configuration conf) {
      this.fs = fs;
      this.stagingDir = stagingDir;
      this.conf = conf;
      this.origPermissions = new HashMap();
    }

    @Override
    public String prepareBulkLoad(final byte[] family, final String srcPath) throws IOException {
      Path p = new Path(srcPath);
      Path stageP = new Path(stagingDir, new Path(Bytes.toString(family), p.getName()));

      // In case of Replication for bulk load files, hfiles are already copied in staging directory
      if (p.equals(stageP)) {
        LOG.debug(p.getName()
            + " is already available in staging directory. Skipping copy or rename.");
        return stageP.toString();
      }

      if (srcFs == null) {
        srcFs = FileSystem.get(p.toUri(), conf);
      }

      if (!isFile(p)) {
        throw new IOException("Path does not reference a file: " + p);
      }

      // Check to see if the source and target filesystems are the same
      if (!FSHDFSUtils.isSameHdfs(conf, srcFs, fs)) {
        LOG.debug("Bulk-load file " + srcPath + " is on different filesystem than " +
            "the destination filesystem. Copying file over to destination staging dir.");
        FileUtil.copy(srcFs, p, fs, stageP, false, conf);
      } else {
        LOG.debug(
[listing truncated in the archive]
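createStagingDir() above buys collision resistance with a wide random BigInteger suffix, then opens the directory up so the loading user can write into it. A self-contained sketch of the same recipe against a local Hadoop FileSystem; the RANDOM_WIDTH and RANDOM_RADIX values here are assumptions modeled on the listing, not necessarily the endpoint's actual constants:

import java.math.BigInteger;
import java.security.SecureRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class StagingDirDemo {
  private static final int RANDOM_WIDTH = 320;  // assumed: bits of entropy in the dir name
  private static final int RANDOM_RADIX = 32;   // assumed: compact base-32 rendering
  private static final FsPermission ALL_ACCESS = new FsPermission((short) 0777);

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    SecureRandom random = new SecureRandom();

    String randomDir = "alice__my_table__" +
        new BigInteger(RANDOM_WIDTH, random).toString(RANDOM_RADIX);
    Path p = new Path("/tmp/hbase-staging", randomDir);

    fs.mkdirs(p, ALL_ACCESS);        // create with wide-open perms...
    fs.setPermission(p, ALL_ACCESS); // ...and re-assert them past the process umask
    System.out.println("staging dir: " + p);
  }
}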

[10/51] [partial] hbase-site git commit: Published site at e58c0385a738df63fa3fff287e1ddcfe6da1d046.

2016-02-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6cc9224/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
index dac2d4d..9c063a3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
@@ -102,1116 +102,1164 @@
[hunk re-flowed and de-duplicated: lines removed and re-added unchanged are shown once;
true additions keep their '+' marker]
 public final class Canary implements Tool {
   // Sink interface used by the canary to output information
   public interface Sink {
+    public long getReadFailureCount();
+    public long incReadFailureCount();
     public void publishReadFailure(HRegionInfo region, Exception e);
     public void publishReadFailure(HRegionInfo region, HColumnDescriptor column, Exception e);
     public void publishReadTiming(HRegionInfo region, HColumnDescriptor column, long msTime);
+    public long getWriteFailureCount();
     public void publishWriteFailure(HRegionInfo region, Exception e);
     public void publishWriteFailure(HRegionInfo region, HColumnDescriptor column, Exception e);
     public void publishWriteTiming(HRegionInfo region, HColumnDescriptor column, long msTime);
   }
   // new extended sink for output regionserver mode info
   // do not change the Sink interface directly due to maintaining the API
   public interface ExtendedSink extends Sink {
     public void publishReadFailure(String table, String server);
     public void publishReadTiming(String table, String server, long msTime);
   }

   // Simple implementation of canary sink that allows to plot on
   // file or standard output timings or failures.
   public static class StdOutSink implements Sink {
+    private AtomicLong readFailureCount = new AtomicLong(0),
+        writeFailureCount = new AtomicLong(0);
+
+    @Override
+    public long getReadFailureCount() {
+      return readFailureCount.get();
+    }
+
+    @Override
+    public long incReadFailureCount() {
+      return readFailureCount.incrementAndGet();
+    }
+
     @Override
     public void publishReadFailure(HRegionInfo region, Exception e) {
+      readFailureCount.incrementAndGet();
       LOG.error(String.format("read from region %s failed", region.getRegionNameAsString()), e);
     }

     @Override
     public void publishReadFailure(HRegionInfo region, HColumnDescriptor column, Exception e) {
       LOG.error(String.format("read from region %s column family %s failed",
           region.getRegionNameAsString(), column.getNameAsString()), e);
     }

     @Override
     public void publishReadTiming(HRegionInfo region, HColumnDescriptor column, long msTime) {
       LOG.info(String.format("read from region %s column family %s in %dms",
           region.getRegionNameAsString(), column.getNameAsString(), msTime));
     }

     @Override
     public void publishWriteFailure(HRegionInfo region, Exception e) {
       LOG.error(String.format("write to region %s failed", region.getRegionNameAsString()), e);
     }

     @Override
     public void publishWriteFailure(HRegionInfo region, HColumnDescriptor column, Exception e) {
       LOG.error(String.format("write to region %s column family %s failed",
           region.getRegionNameAsString(), column.getNameAsString()), e);
     }

     @Override
     public void publishWriteTiming(HRe
[diff truncated in the archive]
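The diff retrofits failure counters onto Sink so callers can read totals instead of scraping logs; note the thread-safe AtomicLong, since region tasks publish concurrently. A trimmed self-contained sketch of the same move, with a simplified interface rather than the real Canary types:

import java.util.concurrent.atomic.AtomicLong;

// Simplified stand-in for Canary.Sink: counts failures as they are published.
interface Sink {
  void publishReadFailure(String region, Exception e);
  long getReadFailureCount();
}

final class CountingStdOutSink implements Sink {
  private final AtomicLong readFailures = new AtomicLong();

  @Override
  public void publishReadFailure(String region, Exception e) {
    readFailures.incrementAndGet(); // safe under concurrent region tasks
    System.err.printf("read from region %s failed: %s%n", region, e);
  }

  @Override
  public long getReadFailureCount() {
    return readFailures.get();
  }

  public static void main(String[] args) {
    Sink sink = new CountingStdOutSink();
    sink.publishReadFailure("r1", new RuntimeException("timeout"));
    System.out.println(sink.getReadFailureCount()); // 1
  }
}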

[10/51] [partial] hbase-site git commit: Published site at d2ba87509b8d193f58183beff4ab76c7edf47e11.

2016-02-18 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f32f549a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
index 0597160..b444f46 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
@@ -184,2145 +184,2183 @@
[hunk re-flowed and de-duplicated: lines removed and re-added unchanged are shown once;
true additions keep their '+' marker]
   // Min batch size when replay WAL edits
   private final int minBatchSize;

+  // the file being split currently
+  private FileStatus fileBeingSplit;
+
+  @VisibleForTesting
   WALSplitter(final WALFactory factory, Configuration conf, Path rootDir,
       FileSystem fs, LastSequenceId idChecker,
       CoordinatedStateManager csm, RecoveryMode mode) {
     this.conf = HBaseConfiguration.create(conf);
     String codecClassName = conf
         .get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
     this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);
     this.rootDir = rootDir;
     this.fs = fs;
     this.sequenceIdChecker = idChecker;
     this.csm = (BaseCoordinatedStateManager) csm;
     this.walFactory = factory;
     this.controller = new PipelineController();

     entryBuffers = new EntryBuffers(controller,
         this.conf.getInt("hbase.regionserver.hlog.splitlog.buffersize",
             128 * 1024 * 1024));

     // a larger minBatchSize may slow down recovery because replay writer has to wait for
     // enough edits before replaying them
     this.minBatchSize = this.conf.getInt("hbase.regionserver.wal.logreplay.batch.size", 64);
     this.distributedLogReplay = (RecoveryMode.LOG_REPLAY == mode);

     this.numWriterThreads = this.conf.getInt("hbase.regionserver.hlog.splitlog.writer.threads", 3);
     if (csm != null && this.distributedLogReplay) {
       outputSink = new LogReplayOutputSink(controller, entryBuffers, numWriterThreads);
     } else {
       if (this.distributedLogReplay) {
         LOG.info("ZooKeeperWatcher is passed in as NULL so disable distributedLogReplay.");
       }
       this.distributedLogReplay = false;
       outputSink = new LogRecoveredEditsOutputSink(controller, entryBuffers, numWriterThreads);
     }
   }

   /**
    * Splits a WAL file into region's recovered-edits directory.
    * This is the main entry point for distributed log splitting from SplitLogWorker.
    * If the log file has N regions then N recovered.edits files will be produced.
    * @param rootDir
    * @param logfile
    * @param fs
    * @param conf
    * @param reporter
    * @param idChecker
    * @param cp coordination state manager
    * @return false if it is interrupted by the progress-able.
    * @throws IOException
    */
   public static boolean splitLogFile(Path rootDir, FileStatus logfile, FileSystem fs,
       Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,
       CoordinatedStateManager cp, RecoveryMode mode, final WALFactory factory) throws IOException {
     WALSplitter s = new WALSplitter(factory, conf, rootDir, fs, idChecker, cp, mode);
     return s.splitLogFile(logfile, reporter);
   }

   // A wrapper to split one log folder using the method used by distributed
   // log splitting. Used by tools and unit tests. It should be package private.
   // It is public only because TestWALObserver is in a different package,
   // which uses this method to do log splitting.
   @VisibleForTesting
   public static List split(Path rootDir, Path logDir, Path oldLogDir,
       FileSystem fs, Configuration conf, final WALFactory factory) throws IOException {
     final FileStatus[] logfiles = SplitLogManager.getFileList(conf,
         Collections.singletonList(logDir), null);
     List splits = new ArrayList();
     if (logfiles != null && logfiles.length > 0) {
       for (FileStatus logfile : logfiles) {
[listing truncated in the archive]
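Three tuning knobs above come straight from Configuration with hard defaults: the split buffer size, the replay batch size, and the writer thread count. A minimal sketch of that pattern, using the same keys and defaults shown in the constructor:

import org.apache.hadoop.conf.Configuration;

public class SplitterKnobsDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Same keys and defaults as the WALSplitter constructor above.
    int bufferBytes   = conf.getInt("hbase.regionserver.hlog.splitlog.buffersize", 128 * 1024 * 1024);
    int minBatchSize  = conf.getInt("hbase.regionserver.wal.logreplay.batch.size", 64);
    int writerThreads = conf.getInt("hbase.regionserver.hlog.splitlog.writer.threads", 3);
    System.out.printf("buffer=%d batch=%d writers=%d%n", bufferBytes, minBatchSize, writerThreads);
  }
}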


[10/51] [partial] hbase-site git commit: Published site at 85e1d9a109341c5f4aabb0e82c96ab52e99a6d72.

2016-02-12 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/526c7822/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.HRegionWithSeqId.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.HRegionWithSeqId.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.HRegionWithSeqId.html
index b4ecdec..6a210d8 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.HRegionWithSeqId.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.HRegionWithSeqId.html
@@ -108,7 +108,7 @@ (and the following hunks)
[re-flowed: every hunk in this file pair is anchor-only churn in the generated javadoc;
the removed and re-added member declarations are textually identical, only their
generated source links moved. Members touched:]

static class TestHRegion.HRegionWithSeqId extends org.apache.hadoop.hbase.regionserver.HRegion
  public TestHRegion.HRegionWithSeqId(org.apache.hadoop.fs.Path tableDir,
      org.apache.hadoop.hbase.wal.WAL wal, org.apache.hadoop.fs.FileSystem fs,
      org.apache.hadoop.conf.Configuration confParam, ...)
  protected long getNextSequenceId(org.apache.hadoop.hbase.wal.WAL wal) throws IOException

diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.Incrementer.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.Incrementer.html
index 7e6383b..6061c8c 100644
[anchor-only churn again; members touched in this TestCase for increment:]

private static class TestHRegion.Incrementer extends Object implements Runnable
  private org.apache.hadoop.hbase.regionserver.HRegion region
  private static final byte[] incRow, family, qualifier
  private static final long ONE
  private int incCounter
  public TestHRegion.Incrementer(org.apache.hadoop.hbase.regionserver.HRegion region, int incCounter)
  public void run()

diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.IsFlushWALMarker.html b/testdevapidocs/org/apache/hadoop/hbase/
[remainder truncated in the archive]

[10/51] [partial] hbase-site git commit: Published site at 29a192ef3cbe3b9cc12a6ee38f39e1199ac9790f.

2016-02-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bb348c6/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionMonitor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionMonitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionMonitor.html
index d3abe39..c51826f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionMonitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.RegionMonitor.html
@@ -615,9 +615,9 @@
[hunks re-flowed: HTML line numbers and mid-token wraps removed; angle-bracketed
argument placeholders in the usage strings were stripped by the HTML and are not
restored. The substantive change replaces process-killing System.exit() calls with
returned error codes]
         if (this.failOnError && monitor.hasError()) {
           monitorThread.interrupt();
           if (monitor.initialized) {
-            System.exit(monitor.errorCode);
+            return monitor.errorCode;
           } else {
-            System.exit(INIT_ERROR_EXIT_CODE);
+            return INIT_ERROR_EXIT_CODE;
           }
         }
         currentTimeLength = System.currentTimeMillis() - startTime;
@@ -626,630 +626,629 @@
               + ") after timeout limit:" + this.timeout
               + " will be killed itself !!");
           if (monitor.initialized) {
-            System.exit(TIMEOUT_ERROR_EXIT_CODE);
+            return TIMEOUT_ERROR_EXIT_CODE;
           } else {
-            System.exit(INIT_ERROR_EXIT_CODE);
+            return INIT_ERROR_EXIT_CODE;
           }
-          break;
The remaining lines of the hunk show only the removed side (their replacements are cut
off in the archive), re-flowed:

        }
      }

      if (this.failOnError && monitor.finalCheckForErrors()) {
        monitorThread.interrupt();
        System.exit(monitor.errorCode);
      }
    } finally {
      if (monitor != null) monitor.close();
    }

    Thread.sleep(interval);
  } while (interval > 0);
} // try-with-resources close

if (choreService != null) {
  choreService.shutdown();
}
return (monitor.errorCode);
  }

  private void printUsageAndExit() {
    System.err.printf(
        "Usage: bin/hbase %s [opts] [table1 [table2]...] | [regionserver1 [regionserver2]..]%n",
        getClass().getName());
    System.err.println(" where [opts] are:");
    System.err.println("   -help          Show this help and exit.");
    System.err.println("   -regionserver  replace the table argument to regionserver,");
    System.err.println("      which means to enable regionserver mode");
    System.err.println("   -allRegions    Tries all regions on a regionserver,");
    System.err.println("      only works in regionserver mode.");
    System.err.println("   -daemon        Continuous check at defined intervals.");
    System.err.println("   -interval      Interval between checks (sec)");
    System.err.println("   -e             Use table/regionserver as regular expression");
    System.err.println("      which means the table/regionserver is regular expression pattern");
    System.err.println("   -f             stop whole program if first error occurs," +
        " default is true");
    System.err.println("   -t             timeout for a check, default is 60 (milisecs)");
    System.err.println("   -writeSniffing enable the write sniffing in canary");
    System.err.println("   -treatFailureAsError treats read / write failure as error");
    System.err.println("   -writeTable    The table used for write sniffing."
        + " Default is hbase:canary");
    System.err
        .println("   -D= assigning or override the configuration params");
    System.exit(USAGE_EXIT_CODE);
  }

  /**
   * A Factory method for {@link Monitor}.
   * Can be overridden by user.
   * @param index a start index for monitor target
   * @param args args passed from user
   * @return a Monitor instance
   */
  public Monitor newMonitor(final Connection connection, int index, String[] args) {
    Monitor monitor = null;
    String[] monitorTargets = null;

    if (index >= 0) {
      int length = args.length - index;
      monitorTargets = new String[length];
      System.arraycopy(args, index, monitorTargets, 0, length);
    }

    if (this.regionServerMode) {
      monitor =
          new RegionServerMonitor(connection, monitorTargets, this.useRegExp,
              (ExtendedSink) this.sink, this.executor, this.regionServerAllRegions,
              this.treatFailureAsError);
    } else {
      monitor =
          new RegionMonitor(connection, monitorTargets, this.useRegExp, this.sink, this.executor,
              this.writeSniffing, this.writeTableName, this.treatFailureAsError);
    }
    return monito
[listing truncated in the archive]
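Replacing System.exit() with return codes is what lets the monitor run embedded: the process-level exit moves out to main(), where ToolRunner owns it. A minimal sketch of that division of labor, assuming hbase-server on the classpath and Canary's public no-argument constructor as in the era of this listing:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.util.ToolRunner;

public class CanaryLauncher {
  public static void main(String[] args) throws Exception {
    // ToolRunner invokes Canary.run(args), which now *returns* its error code
    // instead of calling System.exit() deep inside the monitoring loop.
    int exitCode = ToolRunner.run(HBaseConfiguration.create(),
        new org.apache.hadoop.hbase.tool.Canary(), args);
    System.exit(exitCode); // the only process-level exit left
  }
}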

[10/51] [partial] hbase-site git commit: Published site at df829ea7d1b4d2ef745e29d2b25b12966000eeb2.

2016-02-10 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/40ef21e4/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.CASTableTest.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.CASTableTest.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.CASTableTest.html
index 9b74031..92e0e39 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.CASTableTest.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.CASTableTest.html
@@ -1835,395 +1835,397 @@
[hunk re-flowed: HTML line numbers and mid-token wraps removed; angle-bracketed
placeholders in the usage strings were stripped by the HTML and are not restored.
The change regroups the flat option list into titled sections]
     System.err.println("Usage: java " + className + " \\");
     System.err.println("   [-D]*  ");
     System.err.println();
-    System.err.println("Options:");
+    System.err.println("General Options:");
     System.err.println(" nomapred        Run multiple clients using threads " +
         "(rather than use mapreduce)");
-    System.err.println(" rows            Rows each client runs. Default: One million");
-    System.err.println(" size            Total size in GiB. Mutually exclusive with --rows. " +
-        "Default: 1.0.");
-    System.err.println(" sampleRate      Execute test on a sample of total " +
-        "rows. Only supported by randomRead. Default: 1.0");
-    System.err.println(" traceRate       Enable HTrace spans. Initiate tracing every N rows. " +
-        "Default: 0");
-    System.err.println(" table           Alternate table name. Default: 'TestTable'");
-    System.err.println(" multiGet        If >0, when doing RandomRead, perform multiple gets " +
-        "instead of single gets. Default: 0");
-    System.err.println(" compress        Compression type to use (GZ, LZO, ...). Default: 'NONE'");
-    System.err.println(" flushCommits    Used to determine if the test should flush the table. " +
-        "Default: false");
-    System.err.println(" writeToWAL      Set writeToWAL on puts. Default: True");
-    System.err.println(" autoFlush       Set autoFlush on htable. Default: False");
-    System.err.println(" oneCon          all the threads share the same connection. Default: False");
-    System.err.println(" presplit        Create presplit table. Recommended for accurate perf " +
-        "analysis (see guide).  Default: disabled");
-    System.err.println(" inmemory        Tries to keep the HFiles of the CF " +
-        "inmemory as far as possible. Not guaranteed that reads are always served " +
-        "from memory.  Default: false");
-    System.err.println(" usetags         Writes tags along with KVs. Use with HFile V3. " +
+    System.err.println(" oneCon          all the threads share the same connection. Default: False");
+    System.err.println(" sampleRate      Execute test on a sample of total " +
+        "rows. Only supported by randomRead. Default: 1.0");
+    System.err.println(" period          Report every 'period' rows: " +
+        "Default: opts.perClientRunRows / 10");
+    System.err.println(" cycles          How many times to cycle the test. Defaults: 1.");
+    System.err.println(" traceRate       Enable HTrace spans. Initiate tracing every N rows. " +
+        "Default: 0");
+    System.err.println(" latency         Set to report operation latencies. Default: False");
+    System.err.println(" measureAfter    Start to measure the latency once 'measureAfter'" +
+        " rows have been treated. Default: 0");
+    System.err.println(" valueSize       Pass value size to use: Default: 1024");
+    System.err.println(" valueRandom     Set if we should vary value size between 0 and " +
+        "'valueSize'; set on read for stats on size: Default: Not set.");
+    System.err.println();
+    System.err.println("Table Creation / Write Tests:");
+    System.err.println(" table           Alternate table name. Default: 'TestTable'");
+    System.err.println(" rows            Rows each client runs. Default: One million");
+    System.err.println(" size            Total size in GiB. Mutually exclusive with --rows. " +
+        "Default: 1.0.");
+    System.err.println(" compress        Compression type to use (GZ, LZO, ...). Default: 'NONE'");
+    System.err.println(" flushCommits    Used to determine if the test should flush the table. " +
         "Default: false");
-    System.err.println(" numoftags       Specify the no of tags that would be needed. " +
-        "This works only if usetags is true.");
-    System.err.println(" filterAll       Helps to filter out all the rows on the server side" +
-        " there by not returning any thing back to the client.  Helps to check the server side" +
-        " performance.  Us
[diff truncated in the archive]

[10/51] [partial] hbase-site git commit: Published site at 7bb68b9031591cf378954a0eb8f71a8b9be01f9c.

2016-02-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/358717f6/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
index d266952..fbdde18 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
@@ -7,36 +7,36 @@ and @@ -49,35 +49,35 @@
[both hunks re-flowed and de-duplicated: the Apache license header in the generated
HRegion listing loses a blank leading comment line, shifting every header line up by
one with no wording change, and the java.util import block is renumbered around two
additions. The net change:]
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 ...
 import java.util.TreeMap;
+import java.util.UUID;
 import java.util.concurrent.Callable;
 ...
[listing truncated in the archive]

[10/51] [partial] hbase-site git commit: Published site at eacf7bcf97f09c9a6e68baf9a4a9ceb1d83c9fb0.

2016-02-08 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/metrics.html
--
diff --git a/metrics.html b/metrics.html
index 32503b8..924ef4d 100644
--- a/metrics.html
+++ b/metrics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Apache HBase (TM) Metrics
@@ -458,7 +458,7 @@ export HBASE_REGIONSERVER_OPTS="$HBASE_JMX_OPTS 
-Dcom.sun.management.jmxrem
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-05
+  Last Published: 
2016-02-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/modules.html
--
diff --git a/modules.html b/modules.html
index 26d9aa0..442e6d6 100644
--- a/modules.html
+++ b/modules.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Modules
 
@@ -366,7 +366,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-05
+  Last Published: 
2016-02-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/old_news.html
--
diff --git a/old_news.html b/old_news.html
index befeffc..48966f3 100644
--- a/old_news.html
+++ b/old_news.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Old Apache HBase (TM) News
@@ -413,7 +413,7 @@ under the License. -->
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-05
+  Last Published: 
2016-02-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/plugin-management.html
--
diff --git a/plugin-management.html b/plugin-management.html
index 3d14983..b3c5bf2 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Plugin Management
 
@@ -423,7 +423,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-05
+  Last Published: 
2016-02-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/plugins.html
--
diff --git a/plugins.html b/plugins.html
index 0b5c96e..b02a14b 100644
--- a/plugins.html
+++ b/plugins.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Build Plugins
 
@@ -366,7 +366,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-05
+  Last Published: 
2016-02-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/poweredbyhbase.html
--
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index e0e17ea..411ab87 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Powered By Apache HBase™
 
@@ -768,7 +768,7 @@ under the License. -->
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-05
+  Last Published: 
2016-02-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/project-info.html
--
diff --git a/project-info.html b/project-info.html
index 2d13a68..d39fbef 100644
--- a/project-info.html
+++ b/project-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Information
 
@@ -340,7 +340,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-05
+  Last Published: 
2016-02-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/670bf1f0/project-reports.html
--
diff --git a/project-reports.html b/project-repo

[10/51] [partial] hbase-site git commit: Published site at 6f6a8ed71fe98b83e8a8db974fc15b0d8597b174.

2016-02-05 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1b9384b2/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/Region.Operation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/Region.Operation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/Region.Operation.html
index 5acbf19..365d061 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/Region.Operation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/Region.Operation.html
@@ -193,523 +193,526 @@
 185   */
 186  void updateReadRequestsCount(long i);
 187
-188  /** @return write request count for 
this region */
-189  long getWriteRequestsCount();
+188  /** @return filtered read requests 
count for this region */
+189  long getFilteredReadRequestsCount();
 190
-191  /**
-192   * Update the write request count for 
this region
-193   * @param i increment
-194   */
-195  void updateWriteRequestsCount(long 
i);
-196
-197  /** @return memstore size for this 
region, in bytes */
-198  long getMemstoreSize();
+191  /** @return write request count for 
this region */
+192  long getWriteRequestsCount();
+193
+194  /**
+195   * Update the write request count for 
this region
+196   * @param i increment
+197   */
+198  void updateWriteRequestsCount(long 
i);
 199
-200  /** @return the number of mutations 
processed bypassing the WAL */
-201  long getNumMutationsWithoutWAL();
+200  /** @return memstore size for this 
region, in bytes */
+201  long getMemstoreSize();
 202
-203  /** @return the size of data processed 
bypassing the WAL, in bytes */
-204  long getDataInMemoryWithoutWAL();
+203  /** @return the number of mutations 
processed bypassing the WAL */
+204  long getNumMutationsWithoutWAL();
 205
-206  /** @return the number of blocked 
requests */
-207  long getBlockedRequestsCount();
+206  /** @return the size of data processed 
bypassing the WAL, in bytes */
+207  long getDataInMemoryWithoutWAL();
 208
-209  /** @return the number of 
checkAndMutate guards that passed */
-210  long getCheckAndMutateChecksPassed();
+209  /** @return the number of blocked 
requests */
+210  long getBlockedRequestsCount();
 211
-212  /** @return the number of failed 
checkAndMutate guards */
-213  long getCheckAndMutateChecksFailed();
+212  /** @return the number of 
checkAndMutate guards that passed */
+213  long getCheckAndMutateChecksPassed();
 214
-215  /** @return the MetricsRegion for this 
region */
-216  MetricsRegion getMetrics();
+215  /** @return the number of failed 
checkAndMutate guards */
+216  long getCheckAndMutateChecksFailed();
 217
-218  /** @return the block distribution for 
all Stores managed by this region */
-219  HDFSBlocksDistribution 
getHDFSBlocksDistribution();
+218  /** @return the MetricsRegion for this 
region */
+219  MetricsRegion getMetrics();
 220
-221  
///
-222  // Locking
+221  /** @return the block distribution for 
all Stores managed by this region */
+222  HDFSBlocksDistribution 
getHDFSBlocksDistribution();
 223
-224  // Region read locks
-225
-226  /**
-227   * Operation enum is used in {@link 
Region#startRegionOperation} and elsewhere to provide
-228   * context for various checks.
-229   */
-230  enum Operation {
-231ANY, GET, PUT, DELETE, SCAN, APPEND, 
INCREMENT, SPLIT_REGION, MERGE_REGION, BATCH_MUTATE,
-232REPLAY_BATCH_MUTATE, COMPACT_REGION, 
REPLAY_EVENT
-233  }
-234
-235  /**
-236   * This method needs to be called 
before any public call that reads or
-237   * modifies data.
-238   * Acquires a read lock and checks if 
the region is closing or closed.
-239   * {@link #closeRegionOperation} MUST then always be called after
-240   * the operation has completed, whether it succeeded or failed.
-241   * @throws IOException
-242   */
-243  void startRegionOperation() throws IOException;
-244
-245  /**
-246   * This method needs to be called before any public call that reads or
-247   * modifies data.
-248   * Acquires a read lock and checks if the region is closing or closed.
-249   * {@link #closeRegionOperation} MUST then always be called after
-250   * the operation has completed, whether it succeeded or failed.
-251   * @param op The operation is about to be taken on the region
-252   * @throws IOException
-253   */
-254  void startRegionOperation(Operation op) throws IOException;
-255
-256  /**
-257   * Closes the region operation lock.
-258   * @throws IOException
-259   */
-260  void closeRegionOperation() throws IOException;
-261
-262  // Row write locks
-263
-264  /**
-265   * Row lock held by a given thread.
-266   * One thread may acquire multiple locks on the same row simultaneously.
-267   * The locks must be released by calling release() from the same thread.
-268   */
-269  public interface RowLock {
-270    /**
-271     * Release the given lock. If there
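
The contract spelled out in the removed javadoc above is easy to get wrong: startRegionOperation() must always be paired with closeRegionOperation(), and a RowLock must be released by the thread that took it. A minimal sketch of a caller honoring both rules; the wrapper class and the mutateRow method are hypothetical, only the Region/HRegion calls come from the API quoted above:

    import java.io.IOException;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.Region;

    // Illustrative sketch only; not production code.
    final class RegionOperationExample {
      static void mutateRow(HRegion region, byte[] row) throws IOException {
        region.startRegionOperation(Region.Operation.PUT); // read lock + closing/closed check
        try {
          Region.RowLock lock = region.getRowLock(row, false); // false = exclusive write lock
          try {
            // ... apply the mutation while the row lock is held ...
          } finally {
            lock.release(); // release from the same thread that acquired it
          }
        } finally {
          region.closeRegionOperation(); // MUST always run, success or failure
        }
      }
    }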


[10/51] [partial] hbase-site git commit: Published site at 18eff3c1c337003b2a419490e621f931d16936fb.

2016-02-04 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a8725a46/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
index e2c4389..d03724e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
@@ -5185,3056 +5185,3058 @@
 5177   * @param readLock is the lock reader 
or writer. True indicates that a non-exclusive
 5178   * lock is requested
 5179   */
-5180  public RowLock getRowLock(byte[] row, 
boolean readLock) throws IOException {
-5181// Make sure the row is inside of 
this region before getting the lock for it.
-5182checkRow(row, "row lock");
-5183// create an object to use as a key 
in the row lock map
-5184HashedBytes rowKey = new 
HashedBytes(row);
-5185
-5186RowLockContext rowLockContext = 
null;
-5187RowLockImpl result = null;
-5188TraceScope traceScope = null;
-5189
-5190// If we're tracing start a span to 
show how long this took.
-5191if (Trace.isTracing()) {
-5192  traceScope = 
Trace.startSpan("HRegion.getRowLock");
-5193  
traceScope.getSpan().addTimelineAnnotation("Getting a " + 
(readLock?"readLock":"writeLock"));
-5194}
-5195
-5196try {
-5197  // Keep trying until we have a 
lock or error out.
-5198  // TODO: do we need to add a time 
component here?
-5199  while (result == null) {
-5200
-5201// Try adding a RowLockContext 
to the lockedRows.
-5202// If we can add it then there's 
no other transactions currently running.
-5203rowLockContext = new 
RowLockContext(rowKey);
-5204RowLockContext existingContext = 
lockedRows.putIfAbsent(rowKey, rowLockContext);
-5205
-5206// if there was a running 
transaction then there's already a context.
-5207if (existingContext != null) {
-5208  rowLockContext = 
existingContext;
-5209}
-5210
-5211// Now try and get the lock.
-5212//
-5213// This can fail as
-5214if (readLock) {
-5215  result = 
rowLockContext.newReadLock();
-5216} else {
-5217  result = 
rowLockContext.newWriteLock();
-5218}
-5219  }
-5220  if 
(!result.getLock().tryLock(this.rowLockWaitDuration, TimeUnit.MILLISECONDS)) 
{
-5221if (traceScope != null) {
-5222  
traceScope.getSpan().addTimelineAnnotation("Failed to get row lock");
-5223}
-5224result = null;
-5225// Clean up the counts just in 
case this was the thing keeping the context alive.
-5226rowLockContext.cleanUp();
-5227throw new IOException("Timed out 
waiting for lock for row: " + rowKey);
-5228  }
-5229  return result;
-5230} catch (InterruptedException ie) 
{
-5231  LOG.warn("Thread interrupted 
waiting for lock on row: " + rowKey);
-5232  InterruptedIOException iie = new 
InterruptedIOException();
-5233  iie.initCause(ie);
-5234  if (traceScope != null) {
-5235
traceScope.getSpan().addTimelineAnnotation("Interrupted exception getting row 
lock");
-5236  }
-5237  
Thread.currentThread().interrupt();
-5238  throw iie;
-5239} finally {
-5240  if (traceScope != null) {
-5241traceScope.close();
-5242  }
-5243}
-5244  }
-5245
-5246  @Override
-5247  public void 
releaseRowLocks(List rowLocks) {
-5248if (rowLocks != null) {
-5249  for (RowLock rowLock : rowLocks) 
{
-5250rowLock.release();
-5251  }
-5252  rowLocks.clear();
-5253}
-5254  }
-5255
-5256  @VisibleForTesting
-5257  class RowLockContext {
-5258private final HashedBytes row;
-5259final ReadWriteLock readWriteLock = 
new ReentrantReadWriteLock(true);
-5260final AtomicBoolean usable = new 
AtomicBoolean(true);
-5261final AtomicInteger count = new 
AtomicInteger(0);
-5262final Object lock = new Object();
-5263
-5264RowLockContext(HashedBytes row) {
-5265  this.row = row;
-5266}
-5267
-5268RowLockImpl newWriteLock() {
-5269  Lock l = 
readWriteLock.writeLock();
-5270  return getRowLock(l);
-5271}
-5272RowLockImpl newReadLock() {
-5273  Lock l = 
readWriteLock.readLock();
-5274  return getRowLock(l);
-5275}
-5276
-5277private RowLockImpl getRowLock(Lock 
l) {
-5278  count.incrementAndGet();
-5279  synchronized (lock) {
-5280if (usable.get()) {
-5281  return new RowLockImpl(this, 
l);
-5282} else {
-5283  return null;
-5284}
-5285  }
-5286}
-5287
-5288void cleanUp() {
-5289  long c = 
count.decrementAndGet();
-5290  if (c <= 0) {
-5291synchronized (lock) {
-
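
The getRowLock body quoted above leans on ConcurrentMap.putIfAbsent so that threads racing on one row converge on a single shared RowLockContext, reference-counted through an AtomicInteger. A standalone sketch of that pattern, with invented names (RowLockRegistry, Context) and none of HRegion's tracing or timeout handling:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.concurrent.locks.ReadWriteLock;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    // Sketch: all threads that race on the same row share one context object.
    final class RowLockRegistry {
      private final ConcurrentMap<String, Context> lockedRows = new ConcurrentHashMap<>();

      static final class Context {
        final ReadWriteLock rwLock = new ReentrantReadWriteLock(true); // fair, like HRegion's
        final AtomicInteger count = new AtomicInteger();
      }

      Context contextFor(String rowKey) {
        Context fresh = new Context();
        Context existing = lockedRows.putIfAbsent(rowKey, fresh);
        Context ctx = (existing != null) ? existing : fresh; // loser reuses winner's context
        ctx.count.incrementAndGet();
        return ctx;
      }

      void cleanUp(String rowKey, Context ctx) {
        if (ctx.count.decrementAndGet() <= 0) {
          lockedRows.remove(rowKey, ctx); // drop the context once nobody holds it
        }
      }
    }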

[10/51] [partial] hbase-site git commit: Published site at 2f5767376f42c0416e025df412e3d5944a1b2a67.

2016-02-03 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6a13df3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
index 9cddc0a..1240e3e 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
@@ -27,8 +27,8 @@
 019import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
 020import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
 021import static 
org.junit.Assert.assertEquals;
-022import static 
org.junit.Assert.assertTrue;
-023import static 
org.junit.Assert.assertNull;
+022import static 
org.junit.Assert.assertNull;
+023import static 
org.junit.Assert.assertTrue;
 024import static org.junit.Assert.fail;
 025
 026import java.io.IOException;
@@ -61,19 +61,19 @@
 053import 
org.apache.hadoop.hbase.client.Durability;
 054import 
org.apache.hadoop.hbase.client.Get;
 055import 
org.apache.hadoop.hbase.client.Increment;
-056import 
org.apache.hadoop.hbase.client.Mutation;
-057import 
org.apache.hadoop.hbase.client.Put;
-058import 
org.apache.hadoop.hbase.client.Result;
-059import 
org.apache.hadoop.hbase.client.RowMutations;
-060import 
org.apache.hadoop.hbase.client.Scan;
-061import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-062import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-063import 
org.apache.hadoop.hbase.io.HeapSize;
-064import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
-065import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-066import 
org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
-067import 
org.apache.hadoop.hbase.util.Bytes;
-068import 
org.apache.hadoop.hbase.util.Threads;
+056import 
org.apache.hadoop.hbase.client.IsolationLevel;
+057import 
org.apache.hadoop.hbase.client.Mutation;
+058import 
org.apache.hadoop.hbase.client.Put;
+059import 
org.apache.hadoop.hbase.client.Result;
+060import 
org.apache.hadoop.hbase.client.RowMutations;
+061import 
org.apache.hadoop.hbase.client.Scan;
+062import 
org.apache.hadoop.hbase.filter.BinaryComparator;
+063import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+064import 
org.apache.hadoop.hbase.io.HeapSize;
+065import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
+066import 
org.apache.hadoop.hbase.testclassification.MediumTests;
+067import 
org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
+068import 
org.apache.hadoop.hbase.util.Bytes;
 069import org.apache.hadoop.hbase.wal.WAL;
 070import org.junit.After;
 071import org.junit.Before;
@@ -191,535 +191,539 @@
 183   */
 184  @Test
 185  public void testIncrementMultiThreads() 
throws IOException {
-186LOG.info("Starting test 
testIncrementMultiThreads");
-187// run with mixed column families 
(1 and 3 versions)
-188initHRegion(tableName, 
name.getMethodName(), new int[] {1,3}, fam1, fam2);
-189
-190// create 25 threads, each will 
increment by its own quantity
-191int numThreads = 25;
-192int incrementsPerThread = 1000;
-193Incrementer[] all = new 
Incrementer[numThreads];
-194int expectedTotal = 0;
-195// create all threads
-196for (int i = 0; i < numThreads; 
i++) {
-197  all[i] = new Incrementer(region, i, 
i, incrementsPerThread);
-198  expectedTotal += (i * 
incrementsPerThread);
-199}
-200
-201// run all threads
-202for (int i = 0; i < numThreads; 
i++) {
-203  all[i].start();
-204}
-205
-206// wait for all threads to finish
-207for (int i = 0; i < numThreads; 
i++) {
-208  try {
-209all[i].join();
-210  } catch (InterruptedException e) 
{
-211LOG.info("Ignored", e);
-212  }
-213}
-214assertICV(row, fam1, qual1, 
expectedTotal);
-215assertICV(row, fam1, qual2, 
expectedTotal*2);
-216assertICV(row, fam2, qual3, 
expectedTotal*3);
-217LOG.info("testIncrementMultiThreads 
successfully verified that total is " + expectedTotal);
-218  }
-219
-220
-221  private void assertICV(byte [] row,
-222 byte [] 
familiy,
-223 byte[] 
qualifier,
-224 long amount) 
throws IOException {
-225// run a get and see?
-226Get get = new Get(row);
-227get.addColumn(familiy, qualifier);
-228Result result = region.get(get);
-229assertEquals(1, result.size());
-230
-231Cell kv = result.rawCells()[0];
-232long r = 
Bytes.toLong(CellUtil.cloneValue(kv));
-233assertEquals(amount, r);
-234  }
-235
-236  private void initHRegion (byte [] 
tab
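
The expected total in testIncrementMultiThreads follows from thread i adding i per iteration: expectedTotal = incrementsPerThread * (0 + 1 + ... + numThreads - 1). A plain-JVM sketch of the same fan-out and check, with an AtomicLong standing in for the HBase row and all names hypothetical:

    import java.util.concurrent.atomic.AtomicLong;

    // Sketch of the test's thread fan-out, minus the HBase machinery.
    final class IncrementRace {
      public static void main(String[] args) throws InterruptedException {
        final int numThreads = 25, incrementsPerThread = 1000;
        final AtomicLong counter = new AtomicLong(); // stands in for the counter cell
        long expectedTotal = 0;
        Thread[] all = new Thread[numThreads];
        for (int i = 0; i < numThreads; i++) {
          final long amount = i;
          expectedTotal += amount * incrementsPerThread;
          all[i] = new Thread(() -> {
            for (int n = 0; n < incrementsPerThread; n++) {
              counter.addAndGet(amount); // each thread increments by its own quantity
            }
          });
          all[i].start();
        }
        for (Thread t : all) t.join(); // wait for all threads to finish
        System.out.println(counter.get() + " == " + expectedTotal);
      }
    }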

[10/51] [partial] hbase-site git commit: Published site at 2cc48e039d1f800832ac8880bbc820982e0ac8a5.

2016-02-01 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a7e5a8ce/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.ShareableMemoryKeyValue.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.ShareableMemoryKeyValue.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.ShareableMemoryKeyValue.html
index e8248d4..c36648a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.ShareableMemoryKeyValue.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.ShareableMemoryKeyValue.html
@@ -59,1847 +59,1848 @@
 051import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
 052import 
org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 053import 
org.apache.hadoop.hbase.nio.ByteBuff;
-054import 
org.apache.hadoop.hbase.security.EncryptionUtil;
-055import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-056import 
org.apache.hadoop.hbase.util.Bytes;
-057import 
org.apache.hadoop.hbase.util.IdLock;
-058import 
org.apache.hadoop.hbase.util.ObjectIntPair;
-059import 
org.apache.hadoop.io.WritableUtils;
-060import org.apache.htrace.Trace;
-061import org.apache.htrace.TraceScope;
-062
-063import 
com.google.common.annotations.VisibleForTesting;
-064
-065/**
-066 * Implementation that can handle all 
hfile versions of {@link HFile.Reader}.
-067 */
-068@InterfaceAudience.Private
-069@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
-070public class HFileReaderImpl implements 
HFile.Reader, Configurable {
-071  // This class is HFileReaderV3 + 
HFileReaderV2 + AbstractHFileReader all squashed together into
-072  // one file.  Ditto for all the 
HFileReader.ScannerV? implementations. I was running up against
-073  // the MaxInlineLevel limit because too 
many tiers involved reading from an hfile. Was also hard
-074  // to navigate the source code when so 
many classes participating in read.
-075  private static final Log LOG = 
LogFactory.getLog(HFileReaderImpl.class);
-076
-077  /** Data block index reader keeping the 
root data index in memory */
-078  private 
HFileBlockIndex.CellBasedKeyBlockIndexReader dataBlockIndexReader;
-079
-080  /** Meta block index reader -- always 
single level */
-081  private 
HFileBlockIndex.ByteArrayKeyBlockIndexReader metaBlockIndexReader;
-082
-083  private final FixedFileTrailer 
trailer;
-084
-085  /** Filled when we read in the trailer. 
*/
-086  private final Compression.Algorithm 
compressAlgo;
-087
-088  private boolean 
isPrimaryReplicaReader;
-089
-090  /**
-091   * What kind of data block encoding 
should be used while reading, writing,
-092   * and handling cache.
-093   */
-094  private HFileDataBlockEncoder 
dataBlockEncoder = NoOpDataBlockEncoder.INSTANCE;
-095
-096  /** Last key in the file. Filled in 
when we read in the file info */
-097  private Cell lastKeyCell = null;
-098
-099  /** Average key length read from file 
info */
-100  private int avgKeyLen = -1;
-101
-102  /** Average value length read from file 
info */
-103  private int avgValueLen = -1;
-104
-105  /** Key comparator */
-106  private CellComparator comparator = 
CellComparator.COMPARATOR;
-107
-108  /** Size of this file. */
-109  private final long fileSize;
-110
-111  /** Block cache configuration. */
-112  private final CacheConfig cacheConf;
-113
-114  /** Path of file */
-115  private final Path path;
-116
-117  /** File name to be used for block 
names */
-118  private final String name;
-119
-120  private FileInfo fileInfo;
-121
-122  private Configuration conf;
-123
-124  private HFileContext hfileContext;
-125
-126  /** Filesystem-level block reader. */
-127  private HFileBlock.FSReader 
fsBlockReader;
-128
-129  /**
-130   * A "sparse lock" implementation 
allowing to lock on a particular block
-131   * identified by offset. The purpose of 
this is to avoid two clients loading
-132   * the same block, and have all but one 
client wait to get the block from the
-133   * cache.
-134   */
-135  private IdLock offsetLock = new 
IdLock();
-136
-137  /**
-138   * Blocks read from the load-on-open 
section, excluding data root index, meta
-139   * index, and file info.
-140   */
-141  private List 
loadOnOpenBlocks = new ArrayList();
-142
-143  /** Minimum minor version supported by 
this HFile format */
-144  static final int MIN_MINOR_VERSION = 
0;
-145
-146  /** Maximum minor version supported by 
this HFile format */
-147  // We went to version 2 when we moved 
to pb'ing fileinfo and the trailer on
-148  // the file. This version can read 
Writables version 1.
-149  static final int MAX_MINOR_VERSION = 
3;
-150
-151  /**
-152   * We can read files whose major 
version is v2 IFF their minor version is at least 3.
-153   */
-154  private sta
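
The "sparse lock" comment in the removed HFileReaderImpl source describes locking per block offset so that only one client loads a given block while the rest wait and then hit the cache. A rough stand-in for that idea (HBase's IdLock is richer; OffsetLock and its methods are invented here):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.CountDownLatch;

    // Sketch: one in-flight load per block offset; other readers wait for it.
    final class OffsetLock {
      private final ConcurrentMap<Long, CountDownLatch> inFlight = new ConcurrentHashMap<>();

      // Returns true if the caller should load the block itself.
      boolean tryAcquire(long offset) throws InterruptedException {
        CountDownLatch mine = new CountDownLatch(1);
        CountDownLatch existing = inFlight.putIfAbsent(offset, mine);
        if (existing == null) return true; // we own the load for this offset
        existing.await();                  // someone else is loading; wait for them
        return false;                      // block should now be in the cache
      }

      void release(long offset) {
        CountDownLatch latch = inFlight.remove(offset);
        if (latch != null) latch.countDown(); // wake the waiters
      }
    }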

[10/51] [partial] hbase-site git commit: Published site at 0de221a19d799ad515f8f4556cacd05e6b4e74f8.

2016-01-29 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c6155501/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/package-use.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/package-use.html
new file mode 100644
index 000..54b33dd
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/package-use.html
@@ -0,0 +1,228 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+Uses of Package org.apache.hadoop.hbase.regionserver.throttle (Apache 
HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+
+Uses of 
Package org.apache.hadoop.hbase.regionserver.throttle
+
+
+
+
+
+Packages that use org.apache.hadoop.hbase.regionserver.throttle 
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.mob
+ 
+
+
+org.apache.hadoop.hbase.regionserver
+ 
+
+
+org.apache.hadoop.hbase.regionserver.compactions
+ 
+
+
+org.apache.hadoop.hbase.regionserver.throttle
+ 
+
+
+
+
+
+
+
+
+Classes in org.apache.hadoop.hbase.regionserver.throttle
 used by org.apache.hadoop.hbase.mob 
+
+Class and Description
+
+
+
+ThroughputController
+A utility that constrains the total throughput of one or 
more simultaneous flows by
+ sleeping when necessary.
+
+
+
+
+
+
+
+
+
+Classes in org.apache.hadoop.hbase.regionserver.throttle
 used by org.apache.hadoop.hbase.regionserver 
+
+Class and Description
+
+
+
+ThroughputController
+A utility that constrains the total throughput of one or 
more simultaneous flows by
+ sleeping when necessary.
+
+
+
+
+
+
+
+
+
+Classes in org.apache.hadoop.hbase.regionserver.throttle
 used by org.apache.hadoop.hbase.regionserver.compactions 
+
+Class and Description
+
+
+
+ThroughputController
+A utility that constrains the total throughput of one or 
more simultaneous flows by
+ sleeping when necessary.
+
+
+
+
+
+
+
+
+
+Classes in org.apache.hadoop.hbase.regionserver.throttle
 used by org.apache.hadoop.hbase.regionserver.throttle 
+
+Class and Description
+
+
+
+NoLimitThroughputController 
+
+
+PressureAwareThroughputController 
+
+
+PressureAwareThroughputController.ActiveOperation
+Stores the information of one controlled compaction.
+
+
+
+ThroughputController
+A utility that constrains the total throughput of one or 
more simultaneous flows by
+ sleeping when necessary.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+
+
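
ThroughputController's one-line description above, constraining total throughput "by sleeping when necessary", boils down to comparing bytes processed against a time budget. A hedged sketch of that sleep-based throttle; SleepThrottle and its fields are illustrative, not HBase's PressureAwareThroughputController:

    // Sketch: cap bytes/sec by sleeping off any surplus over the time budget.
    final class SleepThrottle {
      private final long maxBytesPerSec;
      private long windowStart = System.nanoTime();
      private long bytesInWindow;

      SleepThrottle(long maxBytesPerSec) { this.maxBytesPerSec = maxBytesPerSec; }

      synchronized void control(long deltaBytes) throws InterruptedException {
        bytesInWindow += deltaBytes;
        double elapsedSec = (System.nanoTime() - windowStart) / 1e9;
        double minSec = (double) bytesInWindow / maxBytesPerSec; // time these bytes should take
        if (minSec > elapsedSec) {
          Thread.sleep((long) ((minSec - elapsedSec) * 1000)); // sleep when necessary
        }
        if (elapsedSec >= 1.0) { // roll the accounting window roughly every second
          windowStart = System.nanoTime();
          bytesInWindow = 0;
        }
      }
    }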

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c6155501/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-summary.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-summary.html
index 303e4d6..c6e14ac 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-summary.html
@@ -35,7 +35,7 @@
 
 
 
-Prev
 Package
+Prev
 Package
 Next
 Package
 
 
@@ -405,7 +405,7 @@
 
 
 
-Prev
 Package
+Prev
 Package
 Next
 Package
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c6155501/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 6044822..ec7b7b7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -35,7 +35,7 @@
 
 
 
-Prev
+Prev
 Next
 
 
@@ -283,7 +283,7 @@
 
 
 
-Prev
+Prev
 Next
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c6155501/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.

[10/51] [partial] hbase-site git commit: Published site at 138b754671d51d3f494adc250ab0cb9e085c858a.

2016-01-28 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/39cf5e9b/supportingprojects.html
--
diff --git a/supportingprojects.html b/supportingprojects.html
index 749a263..2361dfe 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Supporting Projects
 
@@ -519,7 +519,7 @@ under the License. -->
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-01-27
+  Last Published: 
2016-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/39cf5e9b/team-list.html
--
diff --git a/team-list.html b/team-list.html
index 00efa7c..a8cf85f 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Team list
 
@@ -785,7 +785,7 @@ window.onLoad = init();
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-01-27
+  Last Published: 
2016-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/39cf5e9b/testapidocs/index-all.html
--
diff --git a/testapidocs/index-all.html b/testapidocs/index-all.html
index 6b72c64..f363614 100644
--- a/testapidocs/index-all.html
+++ b/testapidocs/index-all.html
@@ -846,6 +846,8 @@
  
 org.apache.hadoop.hbase.errorhandling
 - package org.apache.hadoop.hbase.errorhandling
  
+org.apache.hadoop.hbase.exceptions
 - package org.apache.hadoop.hbase.exceptions
+ 
 org.apache.hadoop.hbase.filter
 - package org.apache.hadoop.hbase.filter
  
 org.apache.hadoop.hbase.http
 - package org.apache.hadoop.hbase.http

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/39cf5e9b/testapidocs/org/apache/hadoop/hbase/errorhandling/package-summary.html
--
diff --git 
a/testapidocs/org/apache/hadoop/hbase/errorhandling/package-summary.html 
b/testapidocs/org/apache/hadoop/hbase/errorhandling/package-summary.html
index 8ebad1c..6115a84 100644
--- a/testapidocs/org/apache/hadoop/hbase/errorhandling/package-summary.html
+++ b/testapidocs/org/apache/hadoop/hbase/errorhandling/package-summary.html
@@ -36,7 +36,7 @@
 
 
 Prev
 Package
-Next 
Package
+Next
 Package
 
 
 Frames
@@ -84,7 +84,7 @@
 
 
 Prev
 Package
-Next 
Package
+Next
 Package
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/39cf5e9b/testapidocs/org/apache/hadoop/hbase/errorhandling/package-tree.html
--
diff --git 
a/testapidocs/org/apache/hadoop/hbase/errorhandling/package-tree.html 
b/testapidocs/org/apache/hadoop/hbase/errorhandling/package-tree.html
index 8c9809d..91d01ff 100644
--- a/testapidocs/org/apache/hadoop/hbase/errorhandling/package-tree.html
+++ b/testapidocs/org/apache/hadoop/hbase/errorhandling/package-tree.html
@@ -36,7 +36,7 @@
 
 
 Prev
-Next
+Next
 
 
 Frames
@@ -88,7 +88,7 @@
 
 
 Prev
-Next
+Next
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/39cf5e9b/testapidocs/org/apache/hadoop/hbase/exceptions/package-frame.html
--
diff --git a/testapidocs/org/apache/hadoop/hbase/exceptions/package-frame.html 
b/testapidocs/org/apache/hadoop/hbase/exceptions/package-frame.html
new file mode 100644
index 000..17ca55f
--- /dev/null
+++ b/testapidocs/org/apache/hadoop/hbase/exceptions/package-frame.html
@@ -0,0 +1,12 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+org.apache.hadoop.hbase.exceptions (Apache HBase 2.0.0-SNAPSHOT Test 
API)
+
+
+
+org.apache.hadoop.hbase.exceptions
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/39cf5e9b/testapidocs/org/apache/hadoop/hbase/exceptions/package-summary.html
--
diff --git 
a/testapidocs/org/apache/hadoop/hbase/exceptions/package-summary.html 
b/testapidocs/org/apache/hadoop/hbase/exceptions/package-summary.html
new file mode 100644
index 000..6263f45
--- /dev/null
+++ b/testapidocs/org/apache/hadoop/hbase/exceptions/package-summary.html
@@ -0,0 +1,114 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+org.apache.hadoop.hbase.exceptions (Apache HBase 2.0.0-SNAPSHOT Test 
API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+

[10/51] [partial] hbase-site git commit: Published site at a87d9560fcf4803bdd7a01b6e4ec21435d4e11b9.

2016-01-25 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8d6f420/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.html
index 7e13ac4..a2d35fb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.html
@@ -73,173 +73,172 @@
 065  }
 066
 067  interface Loader extends 
ProcedureLoader {
-068void removeLog(ProcedureWALFile 
log);
-069void 
markCorruptedWAL(ProcedureWALFile log, IOException e);
-070  }
-071
-072  private ProcedureWALFormat() {}
-073
-074  public static void load(final 
Iterator logs,
-075  final ProcedureStoreTracker 
tracker, final Loader loader) throws IOException {
-076ProcedureWALFormatReader reader = new 
ProcedureWALFormatReader(tracker);
-077tracker.setKeepDeletes(true);
-078try {
-079  while (logs.hasNext()) {
-080ProcedureWALFile log = 
logs.next();
-081log.open();
-082try {
-083  reader.read(log, loader);
-084} finally {
-085  log.close();
-086}
-087  }
-088  reader.finalize(loader);
-089  // The tracker is now updated with 
all the procedures read from the logs
-090  tracker.setPartialFlag(false);
-091  tracker.resetUpdates();
-092} finally {
-093  tracker.setKeepDeletes(false);
-094}
-095  }
-096
-097  public static void 
writeHeader(OutputStream stream, ProcedureWALHeader header)
-098  throws IOException {
-099header.writeDelimitedTo(stream);
-100  }
-101
-102  /*
-103   * +-----------------+
-104   * | END OF WAL DATA | <---+
-105   * +-----------------+     |
-106   * |                 |     |
-107   * |     Tracker     |     |
-108   * |                 |     |
-109   * +-----------------+     |
-110   * |     version     |     |
-111   * +-----------------+     |
-112   * |  TRAILER_MAGIC  |     |
-113   * +-----------------+     |
-114   * |      offset     |-----+
-115   * +-----------------+
-116   */
-117  public static void 
writeTrailer(FSDataOutputStream stream, ProcedureStoreTracker tracker)
-118  throws IOException {
-119long offset = stream.getPos();
-120
-121// Write EOF Entry
-122ProcedureWALEntry.newBuilder()
-123  
.setType(ProcedureWALEntry.Type.PROCEDURE_WAL_EOF)
-124  
.build().writeDelimitedTo(stream);
-125
-126// Write Tracker
-127tracker.writeTo(stream);
-128
-129stream.write(TRAILER_VERSION);
-130StreamUtils.writeLong(stream, 
TRAILER_MAGIC);
-131StreamUtils.writeLong(stream, 
offset);
-132  }
-133
-134  public static ProcedureWALHeader 
readHeader(InputStream stream)
-135  throws IOException {
-136ProcedureWALHeader header;
-137try {
-138  header = 
ProcedureWALHeader.parseDelimitedFrom(stream);
-139} catch 
(InvalidProtocolBufferException e) {
-140  throw new 
InvalidWALDataException(e);
-141}
-142
-143if (header == null) {
-144  throw new 
InvalidWALDataException("No data available to read the Header");
-145}
-146
-147if (header.getVersion() < 0 || 
header.getVersion() != HEADER_VERSION) {
-148  throw new 
InvalidWALDataException("Invalid Header version. got " + header.getVersion() 
+
-149  " expected " + 
HEADER_VERSION);
-150}
-151
-152if (header.getType() < 0 || 
header.getType() > LOG_TYPE_MAX_VALID) {
-153  throw new 
InvalidWALDataException("Invalid header type. got " + header.getType());
-154}
-155
-156return header;
-157  }
-158
-159  public static ProcedureWALTrailer 
readTrailer(FSDataInputStream stream, long startPos, long size)
-160  throws IOException {
-161long trailerPos = size - 17; // 
Beginning of the Trailer Jump
-162
-163if (trailerPos < startPos) {
-164  throw new 
InvalidWALDataException("Missing trailer: size=" + size + " startPos=" + 
startPos);
-165}
-166
-167stream.seek(trailerPos);
-168int version = stream.read();
-169if (version != TRAILER_VERSION) {
-170  throw new 
InvalidWALDataException("Invalid Trailer version. got " + version +
-171  " expected " + 
TRAILER_VERSION);
-172}
-173
-174long magic = 
StreamUtils.readLong(stream);
-175if (magic != TRAILER_MAGIC) {
-176  throw new 
InvalidWALDataException("Invalid Trailer magic. got " + magic +
-177  " expected " + 
TRAILER_MAGIC);
-178}
-179
-180long trailerOffset = 
StreamUtils.readLong(stream);
-181stream.seek(trailerOffset);
-182
-183ProcedureWALEntry entry = 
readEntry(stream);
-184if (entry.getType() != 
ProcedureWALEntry.Type.PROCEDURE_WAL_EOF) {
-185  throw new 
InvalidWALDataException("In
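
The magic number 17 in readTrailer above is the fixed trailer-jump size: one version byte plus the 8-byte TRAILER_MAGIC plus the 8-byte offset, as the diagram shows. A sketch of just that seek arithmetic; TrailerMath is a made-up helper:

    import java.io.IOException;

    // Sketch of the trailer-jump math: the last 17 bytes of the file are
    // 1 version byte + 8-byte magic + 8-byte offset, so the reader seeks
    // to (size - 17), validates, then jumps back to the trailer entry.
    final class TrailerMath {
      static final int TRAILER_JUMP_BYTES = 1 + 8 + 8; // = 17

      static long trailerPos(long fileSize, long startPos) throws IOException {
        long pos = fileSize - TRAILER_JUMP_BYTES;
        if (pos < startPos) {
          throw new IOException("Missing trailer: size=" + fileSize + " startPos=" + startPos);
        }
        return pos;
      }
    }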

[10/51] [partial] hbase-site git commit: Published site at 4bf6f8379d7f85413b914dddf607d016780d40ce.

2016-01-21 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/75eda567/devapidocs/org/apache/hadoop/hbase/master/normalizer/NormalizationPlan.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/normalizer/NormalizationPlan.html 
b/devapidocs/org/apache/hadoop/hbase/master/normalizer/NormalizationPlan.html
new file mode 100644
index 000..6f08751
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/normalizer/NormalizationPlan.html
@@ -0,0 +1,249 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+NormalizationPlan (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.master.normalizer
+Interface 
NormalizationPlan
+
+
+
+
+
+
+All Known Implementing Classes:
+EmptyNormalizationPlan, MergeNormalizationPlan, SplitNormalizationPlan
+
+
+
+@InterfaceAudience.Private
+public interface NormalizationPlan
+Interface for normalization plan.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes 
+
+Modifier and Type
+Interface and Description
+
+
+static class 
+NormalizationPlan.PlanType 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods 
+
+Modifier and Type
+Method and Description
+
+
+void
+execute(Admin admin)
+Executes normalization plan on cluster (does actual 
splitting/merging work).
+
+
+
+NormalizationPlan.PlanType
+getType() 
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+execute
+void execute(Admin admin)
+Executes normalization plan on cluster (does actual 
splitting/merging work).
+Parameters:admin - 
instance of Admin
+
+
+
+
+
+
+
+getType
+NormalizationPlan.PlanType getType()
+Returns:the type of this 
plan
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+
+
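
Given the execute(Admin) contract shown above, a plan implementation reduces to one Admin call. A hypothetical split plan, assuming Admin.splitRegion(byte[]) as in the client API of this era; this is a sketch, not the project's SplitNormalizationPlan:

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Admin;

    // Sketch of a NormalizationPlan implementation; names are illustrative.
    final class SplitPlanSketch /* implements NormalizationPlan */ {
      private final byte[] regionName;

      SplitPlanSketch(byte[] regionName) { this.regionName = regionName; }

      public void execute(Admin admin) {
        try {
          admin.splitRegion(regionName); // does the actual splitting work
        } catch (IOException e) {
          // real code logs and moves on; normalization is best-effort
        }
      }
    }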

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/75eda567/devapidocs/org/apache/hadoop/hbase/master/normalizer/RegionNormalizer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/normalizer/RegionNormalizer.html 
b/devapidocs/org/apache/hadoop/hbase/master/normalizer/RegionNormalizer.html
index 31addc4..a3187fd 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/normalizer/RegionNormalizer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/normalizer/RegionNormalizer.html
@@ -35,7 +35,7 @@
 
 
 
-Prev Class
+Prev Class
 Next Class
 
 
@@ -92,7 +92,7 @@
 
 
 @InterfaceAudience.Private
-public interface RegionNormalizer
+public interface RegionNormalizer
 Performs "normalization" of regions on the cluster, making 
sure that suboptimal
  choice of split keys doesn't leave cluster in a situation when some regions 
are
  substantially larger than others for considerable amount of time.
@@ -120,20 +120,19 @@ public interface Method and Description
 
 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-computePlanForTable(TableName table,
-  http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List types)
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+computePlanForTable(TableName table)
 Computes next optimal normalization plan.
 
 
 
 long
-getSkippedCount(NormalizationPlan.PlanType type) 
+getSkippedCount(NormalizationPlan.PlanType type) 
 
 
 void
-planSkipped(HRegionInfo hri,
-  NormalizationPlan.PlanType type)
+planSkipped(HRegionInfo hri,
+  NormalizationPlan.PlanType type)
 Notification for the case where plan couldn't be executed 
due to constraint vio

[10/51] [partial] hbase-site git commit: Published site at cb17c7a97a1e2eb0ebd532f614191e4edbb9e49b.

2016-01-15 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/50917b1d/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.BulkDisabler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.BulkDisabler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.BulkDisabler.html
index 5439921..945a72c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.BulkDisabler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.BulkDisabler.html
@@ -44,511 +44,508 @@
 036import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 037import 
org.apache.hadoop.hbase.client.TableState;
 038import 
org.apache.hadoop.hbase.constraint.ConstraintException;
-039import 
org.apache.hadoop.hbase.executor.EventType;
-040import 
org.apache.hadoop.hbase.master.AssignmentManager;
-041import 
org.apache.hadoop.hbase.master.BulkAssigner;
-042import 
org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-043import 
org.apache.hadoop.hbase.master.RegionState;
-044import 
org.apache.hadoop.hbase.master.RegionStates;
-045import 
org.apache.hadoop.hbase.master.TableStateManager;
-046import 
org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
-047import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-048import 
org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos;
-049import 
org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DisableTableState;
-050import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-051import 
org.apache.hadoop.security.UserGroupInformation;
-052import org.apache.htrace.Trace;
-053
-054@InterfaceAudience.Private
-055public class DisableTableProcedure
-056extends 
StateMachineProcedure
-057implements TableProcedureInterface 
{
-058  private static final Log LOG = 
LogFactory.getLog(DisableTableProcedure.class);
-059
-060  private final AtomicBoolean aborted = 
new AtomicBoolean(false);
-061
-062  // This is for back compatible with 1.0 
asynchronized operations.
-063  private final ProcedurePrepareLatch 
syncLatch;
-064
-065  private TableName tableName;
-066  private boolean skipTableStateCheck;
-067  private UserGroupInformation user;
-068
-069  private Boolean traceEnabled = null;
-070
-071  enum MarkRegionOfflineOpResult {
-072
MARK_ALL_REGIONS_OFFLINE_SUCCESSFUL,
-073BULK_ASSIGN_REGIONS_FAILED,
-074
MARK_ALL_REGIONS_OFFLINE_INTERRUPTED,
-075  }
-076
-077  public DisableTableProcedure() {
-078syncLatch = null;
-079  }
-080
-081  /**
-082   * Constructor
-083   * @param env MasterProcedureEnv
-084   * @param tableName the table to 
operate on
-085   * @param skipTableStateCheck whether 
to check table state
-086   * @throws IOException
-087   */
-088  public DisableTableProcedure(
-089  final MasterProcedureEnv env,
-090  final TableName tableName,
-091  final boolean skipTableStateCheck) 
throws IOException {
-092this(env, tableName, 
skipTableStateCheck, null);
-093  }
-094
-095  /**
-096   * Constructor
-097   * @param env MasterProcedureEnv
-098   * @param tableName the table to 
operate on
-099   * @param skipTableStateCheck whether 
to check table state
-100   * @throws IOException
-101   */
-102  public DisableTableProcedure(
-103  final MasterProcedureEnv env,
-104  final TableName tableName,
-105  final boolean 
skipTableStateCheck,
-106  final ProcedurePrepareLatch 
syncLatch) throws IOException {
-107this.tableName = tableName;
-108this.skipTableStateCheck = 
skipTableStateCheck;
-109this.user = 
env.getRequestUser().getUGI();
-110
this.setOwner(this.user.getShortUserName());
-111
-112// Compatible with 1.0: We use latch 
to make sure that this procedure implementation is
-113// compatible with 1.0 asynchronized 
operations. We need to lock the table and check
-114// whether the Disable operation 
could be performed (table exists and online; table state
-115// is ENABLED). Once it is done, we 
are good to release the latch and the client can
-116// start asynchronously wait for the 
operation.
-117//
-118// Note: the member syncLatch could 
be null if we are in failover or recovery scenario.
-119// This is ok for backward 
compatible, as 1.0 client would not able to peek at procedure.
-120this.syncLatch = syncLatch;
-121  }
-122
-123  @Override
-124  protected Flow executeFromState(final 
MasterProcedureEnv env, final DisableTableState state)
-125  throws InterruptedException {
-126if (isTraceEnabled()) {
-127  LOG.trace(this + " execute state=" 
+ state);
-128}
-129
-130try {
-131  switch (state) {
-132  case DISABLE_TABLE_PREPARE:
-133if (prepareDisable(env)) {
-134  
setNextState(DisableTableState.DISABLE_TABLE_P

[10/51] [partial] hbase-site git commit: Published site at c8b9754a5e0372f93d0e9cf1d9ce788c3d0dcda1.

2016-01-14 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9577bd5e/devapidocs/org/apache/hadoop/hbase/ChoreService.ChoreServiceThreadFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ChoreService.ChoreServiceThreadFactory.html
 
b/devapidocs/org/apache/hadoop/hbase/ChoreService.ChoreServiceThreadFactory.html
index 0eaf1bf..bd3d0f3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/ChoreService.ChoreServiceThreadFactory.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/ChoreService.ChoreServiceThreadFactory.html
@@ -330,6 +330,6 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren
 
 
 
-Copyright © 2007–2015 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9577bd5e/devapidocs/org/apache/hadoop/hbase/ChoreService.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ChoreService.html 
b/devapidocs/org/apache/hadoop/hbase/ChoreService.html
index 8ba9067..9d61d8a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ChoreService.html
+++ b/devapidocs/org/apache/hadoop/hbase/ChoreService.html
@@ -775,6 +775,6 @@ implements Copyright © 2007–2015 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9577bd5e/devapidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html 
b/devapidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
index abfe8c5..56e5794 100644
--- a/devapidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
+++ b/devapidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
@@ -254,6 +254,6 @@ extends http://docs.oracle.com/javase/7/docs/api/java/io/IOException.ht
 
 
 
-Copyright © 2007–2015 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9577bd5e/devapidocs/org/apache/hadoop/hbase/ClusterId.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ClusterId.html 
b/devapidocs/org/apache/hadoop/hbase/ClusterId.html
index 3054fda..39f818d 100644
--- a/devapidocs/org/apache/hadoop/hbase/ClusterId.html
+++ b/devapidocs/org/apache/hadoop/hbase/ClusterId.html
@@ -371,6 +371,6 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2015 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9577bd5e/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html 
b/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
index 0957247..71eb8d7 100644
--- a/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
+++ b/devapidocs/org/apache/hadoop/hbase/ClusterStatus.html
@@ -842,6 +842,6 @@ public http://docs.oracle.com/javase/7/docs/api/java/util/Map.html
 
 
 
-Copyright © 2007–2015 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9577bd5e/devapidocs/org/apache/hadoop/hbase/CompatibilityFactory.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CompatibilityFactory.html 
b/devapidocs/org/apache/hadoop/hbase/CompatibilityFactory.html
index 5dd0d62..8261fa8 100644
--- a/devapidocs/org/apache/hadoop/hbase/CompatibilityFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/CompatibilityFactory.html
@@ -338,6 +338,6 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2015 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2016 http://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9577bd5e/devapidocs/org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html
--
diff --git 
a/devapidocs/org/apache/hadoo

[10/51] [partial] hbase-site git commit: Published site at 07b623670647686084f8f5fd2038e2bafcfdac54.

2015-12-30 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e5c3dcd1/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
index 6be575b..dc94e9d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
@@ -27,457 +27,472 @@
 019package 
org.apache.hadoop.hbase.thrift2;
 020
 021import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.appendFromThrift;
-022import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
-023import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
-024import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
-025import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
-026import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.incrementFromThrift;
-027import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putFromThrift;
-028import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putsFromThrift;
-029import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultFromHBase;
-030import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultsFromHBase;
-031import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.rowMutationsFromThrift;
-032import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.scanFromThrift;
-033import static 
org.apache.thrift.TBaseHelper.byteBufferToByteArray;
-034
-035import java.io.IOException;
-036import 
java.lang.reflect.InvocationHandler;
-037import 
java.lang.reflect.InvocationTargetException;
-038import java.lang.reflect.Method;
-039import java.lang.reflect.Proxy;
-040import java.nio.ByteBuffer;
-041import java.util.Collections;
-042import java.util.List;
-043import java.util.Map;
-044import 
java.util.concurrent.ConcurrentHashMap;
-045import 
java.util.concurrent.atomic.AtomicInteger;
-046
-047import org.apache.commons.logging.Log;
-048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.conf.Configuration;
-050import 
org.apache.hadoop.hbase.HRegionLocation;
-051import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-052import 
org.apache.hadoop.hbase.client.RegionLocator;
-053import 
org.apache.hadoop.hbase.client.ResultScanner;
-054import 
org.apache.hadoop.hbase.client.Table;
-055import 
org.apache.hadoop.hbase.security.UserProvider;
-056import 
org.apache.hadoop.hbase.thrift.ThriftMetrics;
-057import 
org.apache.hadoop.hbase.thrift2.generated.TAppend;
-058import 
org.apache.hadoop.hbase.thrift2.generated.TDelete;
-059import 
org.apache.hadoop.hbase.thrift2.generated.TGet;
-060import 
org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-061import 
org.apache.hadoop.hbase.thrift2.generated.THRegionLocation;
-062import 
org.apache.hadoop.hbase.thrift2.generated.TIOError;
-063import 
org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument;
-064import 
org.apache.hadoop.hbase.thrift2.generated.TIncrement;
-065import 
org.apache.hadoop.hbase.thrift2.generated.TPut;
-066import 
org.apache.hadoop.hbase.thrift2.generated.TResult;
-067import 
org.apache.hadoop.hbase.thrift2.generated.TRowMutations;
-068import 
org.apache.hadoop.hbase.thrift2.generated.TScan;
-069import 
org.apache.hadoop.hbase.util.Bytes;
-070import 
org.apache.hadoop.hbase.util.ConnectionCache;
-071import org.apache.thrift.TException;
-072
-073/**
-074 * This class is a glue object that 
connects Thrift RPC calls to the HBase client API primarily
-075 * defined in the HTableInterface.
-076 */
-077@InterfaceAudience.Private
-078@SuppressWarnings("deprecation")
-079public class ThriftHBaseServiceHandler 
implements THBaseService.Iface {
-080
-081  // TODO: Size of pool configurable
-082  private static final Log LOG = 
LogFactory.getLog(ThriftHBaseServiceHandler.class);
-083
-084  // nextScannerId and scannerMap are 
used to manage scanner state
-085  // TODO: Cleanup thread for Scanners, 
Scanner id wrap
-086  private final AtomicInteger 
nextScannerId = new AtomicInteger(0);
-087  private final Map scannerMap =
-088  new ConcurrentHashMap();
-089
-090  private final ConnectionCache 
connectionCache;
+022import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.compareOpFromThrift;
+023import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
+024import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
+025import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
+026import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
+027import static 
org.apache.hadoop.hbas
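
The removed handler fields above show the scanner bookkeeping: an AtomicInteger hands out ids and a ConcurrentHashMap maps each id to its scanner. A generic sketch of the same registry, with invented names and none of the handler's cleanup-thread or id-wrap concerns from its TODOs:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch: id-based handle registry for per-client scanner state.
    final class ScannerRegistry<S> {
      private final AtomicInteger nextScannerId = new AtomicInteger(0);
      private final Map<Integer, S> scannerMap = new ConcurrentHashMap<>();

      int add(S scanner) {
        int id = nextScannerId.incrementAndGet();
        scannerMap.put(id, scanner);
        return id; // returned to the Thrift client as its handle
      }

      S get(int id) { return scannerMap.get(id); }

      S remove(int id) { return scannerMap.remove(id); } // on scanner close
    }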

[10/51] [partial] hbase-site git commit: Published site at 95a13b51ee052eb73882682e8f009bfa1e914866.

2015-12-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32d40534/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
index f3aed1b..0597160 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
@@ -1513,810 +1513,816 @@
 1505  if (maxSeqIdInStores == null || 
maxSeqIdInStores.isEmpty()) {
 1506return;
 1507  }
-1508  List skippedCells = 
new ArrayList();
-1509  for (Cell cell : 
logEntry.getEdit().getCells()) {
-1510if 
(!CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
-1511  byte[] family = 
CellUtil.cloneFamily(cell);
-1512  Long maxSeqId = 
maxSeqIdInStores.get(family);
-1513  // Do not skip cell even if 
maxSeqId is null. Maybe we are in a rolling upgrade,
-1514  // or the master was crashed 
before and we can not get the information.
-1515  if (maxSeqId != null 
&& maxSeqId.longValue() >= logEntry.getKey().getLogSeqNum()) {
-1516skippedCells.add(cell);
-1517  }
-1518}
-1519  }
-1520  if (!skippedCells.isEmpty()) {
-1521
logEntry.getEdit().getCells().removeAll(skippedCells);
-1522  }
-1523}
+1508  // Create the array list for the 
cells that aren't filtered.
+1509  // We make the assumption that 
most cells will be kept.
+1510  ArrayList keptCells = 
new ArrayList(logEntry.getEdit().getCells().size());
+1511  for (Cell cell : 
logEntry.getEdit().getCells()) {
+1512if 
(CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
+1513  keptCells.add(cell);
+1514} else {
+1515  byte[] family = 
CellUtil.cloneFamily(cell);
+1516  Long maxSeqId = 
maxSeqIdInStores.get(family);
+1517  // Do not skip cell even if 
maxSeqId is null. Maybe we are in a rolling upgrade,
+1518  // or the master was crashed 
before and we can not get the information.
+1519  if (maxSeqId == null || 
maxSeqId.longValue() < logEntry.getKey().getLogSeqNum()) {
+1520keptCells.add(cell);
+1521  }
+1522}
+1523  }
 1524
-1525@Override
-1526public void append(RegionEntryBuffer 
buffer) throws IOException {
-1527  List entries = 
buffer.entryBuffer;
-1528  if (entries.isEmpty()) {
-1529LOG.warn("got an empty buffer, 
skipping");
-1530return;
-1531  }
-1532
-1533  WriterAndPath wap = null;
-1534
-1535  long startTime = 
System.nanoTime();
-1536  try {
-1537int editsCount = 0;
+1525  // Anything in the keptCells array 
list is still live.
+1526  // So rather than removing the 
cells from the array list
+1527  // which would be an O(n^2) 
operation, we just replace the list
+1528  
logEntry.getEdit().setCells(keptCells);
+1529}
+1530
+1531@Override
+1532public void append(RegionEntryBuffer 
buffer) throws IOException {
+1533  List entries = 
buffer.entryBuffer;
+1534  if (entries.isEmpty()) {
+1535LOG.warn("got an empty buffer, 
skipping");
+1536return;
+1537  }
 1538
-1539for (Entry logEntry : entries) 
{
-1540  if (wap == null) {
-1541wap = 
getWriterAndPath(logEntry);
-1542if (wap == null) {
-1543  if (LOG.isDebugEnabled()) 
{
-1544
LOG.debug("getWriterAndPath decided we don't need to write edits for " + 
logEntry);
-1545  }
-1546  return;
-1547}
-1548  }
-1549  filterCellByStore(logEntry);
-1550  if 
(!logEntry.getEdit().isEmpty()) {
-1551wap.w.append(logEntry);
-1552
this.updateRegionMaximumEditLogSeqNum(logEntry);
-1553editsCount++;
-1554  } else {
-1555
wap.incrementSkippedEdits(1);
-1556  }
-1557}
-1558// Pass along summary 
statistics
-1559
wap.incrementEdits(editsCount);
-1560
wap.incrementNanoTime(System.nanoTime() - startTime);
-1561  } catch (IOException e) {
-1562  e = e instanceof 
RemoteException ?
-1563  
((RemoteException)e).unwrapRemoteException() : e;
-1564LOG.fatal(" Got while writing 
log entry to log", e);
-1565throw e;
-1566  }
-1567}
-1568
-1569/**
-1570 * @return a map from encoded region 
ID to the number of edits written out for that region.
-1571 */
-1572@Override
-1573public Map 
getOutputCounts() {
-1574  TreeMap ret = 
new TreeMap(Bytes.BYTES_COMPARATOR);
-1575  synchronized (writers) {
-1576for (M
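
The new +1508..+1528 block above trades the old removeAll of skippedCells, which is O(n^2) on an ArrayList, for a single pass that builds the kept list and swaps it in, on the stated assumption that most cells survive. The same idea in isolation; KeepFilter is a made-up helper:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Predicate;

    // Sketch: build the kept list in one pass (O(n)) instead of collecting
    // skipped items and calling removeAll (O(n^2) on an ArrayList).
    final class KeepFilter {
      static <T> List<T> keep(List<T> items, Predicate<T> isLive) {
        ArrayList<T> kept = new ArrayList<>(items.size()); // most survive, so size for all
        for (T item : items) {
          if (isLive.test(item)) kept.add(item);
        }
        return kept; // caller replaces the old list, e.g. edit.setCells(kept)
      }
    }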
