hbase git commit: HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)

2016-04-11 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/master 80df1cb7b -> a395922ad


HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a395922a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a395922a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a395922a

Branch: refs/heads/master
Commit: a395922ad5af9494bb55feee3c275c6d3a575e92
Parents: 80df1cb
Author: Vasudevan 
Authored: Mon Apr 11 14:12:07 2016 +0530
Committer: Vasudevan 
Committed: Mon Apr 11 14:12:07 2016 +0530

--
 .../apache/hadoop/hbase/HColumnDescriptor.java  |  57 +
 .../apache/hadoop/hbase/HTableDescriptor.java   |  57 +
 .../hbase/client/ConnectionImplementation.java  |   2 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   4 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java | 120 ---
 .../hadoop/hbase/protobuf/RequestConverter.java |   8 +-
 .../apache/hadoop/hbase/TableDescriptor.java|   4 +-
 .../mapreduce/TableSnapshotInputFormatImpl.java |   5 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |  12 +-
 .../procedure/AddColumnFamilyProcedure.java |   9 +-
 .../procedure/CloneSnapshotProcedure.java   |   5 +-
 .../master/procedure/CreateTableProcedure.java  |   5 +-
 .../procedure/DeleteColumnFamilyProcedure.java  |   5 +-
 .../procedure/ModifyColumnFamilyProcedure.java  |   9 +-
 .../master/procedure/ModifyTableProcedure.java  |  10 +-
 .../procedure/RestoreSnapshotProcedure.java |   5 +-
 .../procedure/TruncateTableProcedure.java   |   4 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |   4 +-
 .../hbase/snapshot/TestSnapshotManifest.java|   4 +-
 19 files changed, 162 insertions(+), 167 deletions(-)
--
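
A minimal sketch of what this move means for callers, assuming only what the hunks below show: HColumnDescriptor no longer carries its own PB bridge (convert()), and the ColumnFamilySchema conversion now lives in ProtobufUtil.convertToColumnFamilySchema(). Counterpart helpers for the reverse direction or for HTableDescriptor are not shown in this mail and are only assumed to follow the same pattern.

  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
  import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;

  public class HcdPbSketch {
    // Old call site:  byte[] pb = ProtobufUtil.prependPBMagic(hcd.convert().toByteArray());
    // New call site:  the conversion goes through ProtobufUtil; the descriptor stays PB-free.
    public static byte[] toPbBytes(HColumnDescriptor hcd) {
      ColumnFamilySchema cfs = ProtobufUtil.convertToColumnFamilySchema(hcd);
      return ProtobufUtil.prependPBMagic(cfs.toByteArray());
    }
  }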


http://git-wip-us.apache.org/repos/asf/hbase/blob/a395922a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 2c10308..3c16f4e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -32,11 +32,8 @@ import org.apache.hadoop.hbase.exceptions.HBaseException;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
 import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.regionserver.BloomType;
-import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.PrettyPrinter;
 import org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
@@ -297,13 +294,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   private int cachedMaxVersions = UNINITIALIZED;
 
   /**
-   * Default constructor. Must be present for PB deserializations.
-   */
-  private HColumnDescriptor() {
-this.name = null;
-  }
-
-  /**
* Construct a column descriptor specifying only the family name
* The other attributes are defaulted.
*
@@ -1075,8 +1065,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
* @return This instance serialized with pb with pb magic prefix
* @see #parseFrom(byte[])
*/
-  public byte [] toByteArray() {
-return ProtobufUtil.prependPBMagic(convert().toByteArray());
+  public byte[] toByteArray() {
+return ProtobufUtil
+
.prependPBMagic(ProtobufUtil.convertToColumnFamilySchema(this).toByteArray());
   }
 
   /**
@@ -1096,47 +1087,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 } catch (IOException e) {
   throw new DeserializationException(e);
 }
-return convert(cfs);
-  }
-
-  /**
-   * @param cfs
-   * @return An {@link HColumnDescriptor} made from the passed in 
cfs
-   */
-  public static HColumnDescriptor convert(final ColumnFamilySchema cfs) {
-// Use the empty constructor so we preserve the initial values set on 
construction for things
-// like maxVersion.  Otherwise, we pick up wrong values on deserialization 
which makes for
-// unrelated-looking test failures that are hard to trace back to here.
-HColumnDescriptor hcd = new HColumnDescriptor();
-hcd.name = cfs.getName().toByteArray();
-for (BytesBytesPair a: cfs.getAttributesList()) {
-  hcd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
-}
-  

hbase git commit: HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)

2016-04-11 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-1 ff835d5ae -> e407e1e81


HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e407e1e8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e407e1e8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e407e1e8

Branch: refs/heads/branch-1
Commit: e407e1e81d82984d4c52b88a9b4ee582f25c7792
Parents: ff835d5
Author: Vasudevan 
Authored: Mon Apr 11 14:14:06 2016 +0530
Committer: Vasudevan 
Committed: Mon Apr 11 14:15:09 2016 +0530

--
 .../src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java   | 2 ++
 .../src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java| 2 ++
 2 files changed, 4 insertions(+)
--
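
On branch-1 the bridge methods stay but are flagged @Deprecated (hunks below), so existing callers keep compiling. A minimal sketch of the deprecation-free alternative, which round-trips through the pb-magic byte[] form instead of handling ColumnFamilySchema directly:

  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.exceptions.DeserializationException;

  public class HcdRoundTripSketch {
    public static HColumnDescriptor roundTrip(HColumnDescriptor hcd)
        throws DeserializationException {
      byte[] pb = hcd.toByteArray();           // serialized form with pb magic prefix
      return HColumnDescriptor.parseFrom(pb);  // no direct use of the deprecated convert() pair
    }
  }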


http://git-wip-us.apache.org/repos/asf/hbase/blob/e407e1e8/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 17ca37a..6912ab5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1434,6 +1434,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
* @param cfs
* @return An {@link HColumnDescriptor} made from the passed in 
cfs
*/
+  @Deprecated
   public static HColumnDescriptor convert(final ColumnFamilySchema cfs) {
 // Use the empty constructor so we preserve the initial values set on 
construction for things
 // like maxVersion.  Otherwise, we pick up wrong values on deserialization 
which makes for
@@ -1452,6 +1453,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
   /**
* @return Convert this instance to a the pb column family type
*/
+  @Deprecated
   public ColumnFamilySchema convert() {
 ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
 builder.setName(ByteStringer.wrap(getName()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/e407e1e8/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index a6c08c3..d4c4c2b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -1642,6 +1642,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
   /**
* @return Convert the current {@link HTableDescriptor} into a pb 
TableSchema instance.
*/
+  @Deprecated
   public TableSchema convert() {
 TableSchema.Builder builder = TableSchema.newBuilder();
 builder.setTableName(ProtobufUtil.toProtoTableName(getTableName()));
@@ -1667,6 +1668,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param ts A pb TableSchema instance.
* @return An {@link HTableDescriptor} made from the passed in pb 
ts.
*/
+  @Deprecated
   public static HTableDescriptor convert(final TableSchema ts) {
 List<ColumnFamilySchema> list = ts.getColumnFamiliesList();
 HColumnDescriptor [] hcds = new HColumnDescriptor[list.size()];



hbase git commit: HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)

2016-04-11 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 b29074292 -> 672c427ce


HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/672c427c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/672c427c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/672c427c

Branch: refs/heads/branch-1.3
Commit: 672c427ce4da7f01168e1e0be221602d34cc7324
Parents: b290742
Author: Vasudevan 
Authored: Mon Apr 11 14:15:57 2016 +0530
Committer: Vasudevan 
Committed: Mon Apr 11 14:15:57 2016 +0530

--
 .../src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java   | 2 ++
 .../src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java| 2 ++
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/672c427c/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 17ca37a..6912ab5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1434,6 +1434,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
* @param cfs
* @return An {@link HColumnDescriptor} made from the passed in 
cfs
*/
+  @Deprecated
   public static HColumnDescriptor convert(final ColumnFamilySchema cfs) {
 // Use the empty constructor so we preserve the initial values set on 
construction for things
 // like maxVersion.  Otherwise, we pick up wrong values on deserialization 
which makes for
@@ -1452,6 +1453,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
   /**
* @return Convert this instance to a the pb column family type
*/
+  @Deprecated
   public ColumnFamilySchema convert() {
 ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
 builder.setName(ByteStringer.wrap(getName()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/672c427c/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index a6c08c3..d4c4c2b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -1642,6 +1642,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
   /**
* @return Convert the current {@link HTableDescriptor} into a pb 
TableSchema instance.
*/
+  @Deprecated
   public TableSchema convert() {
 TableSchema.Builder builder = TableSchema.newBuilder();
 builder.setTableName(ProtobufUtil.toProtoTableName(getTableName()));
@@ -1667,6 +1668,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param ts A pb TableSchema instance.
* @return An {@link HTableDescriptor} made from the passed in pb 
ts.
*/
+  @Deprecated
   public static HTableDescriptor convert(final TableSchema ts) {
 List<ColumnFamilySchema> list = ts.getColumnFamiliesList();
 HColumnDescriptor [] hcds = new HColumnDescriptor[list.size()];



[hbase] Git Push Summary

2016-04-11 Thread busbey
Repository: hbase
Updated Tags:  refs/tags/rel/1.2.1 [created] 2f38483c8


hbase git commit: HBASE-15093 Replication can report incorrect size of log queue for the global source when multiwal is enabled (Ashu Pachauri)

2016-04-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master a395922ad -> 8541fe4ad


HBASE-15093 Replication can report incorrect size of log queue for the global 
source when multiwal is enabled (Ashu Pachauri)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8541fe4a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8541fe4a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8541fe4a

Branch: refs/heads/master
Commit: 8541fe4ad10737efbec3734d3ba4d835c51afa7d
Parents: a395922
Author: tedyu 
Authored: Mon Apr 11 08:17:20 2016 -0700
Committer: tedyu 
Committed: Mon Apr 11 08:17:20 2016 -0700

--
 .../MetricsReplicationSourceSource.java   |  2 +-
 .../MetricsReplicationGlobalSourceSource.java |  9 +
 .../MetricsReplicationSourceSourceImpl.java   |  9 +
 .../replication/regionserver/MetricsSource.java   | 18 ++
 .../regionserver/ReplicationSource.java   | 14 ++
 5 files changed, 27 insertions(+), 25 deletions(-)
--
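
A minimal sketch (illustrative wrapper, not part of the patch) of how the reworked interface below is meant to be used with multiwal: each wal-group source increments or decrements the shared gauge instead of overwriting it with setSizeOfLogQueue(), so the global figure stays the sum across groups, and the new getSizeOfLogQueue() reads it back.

  import org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;

  public class LogQueueGaugeSketch {
    private final MetricsReplicationSourceSource globalMetrics;

    public LogQueueGaugeSketch(MetricsReplicationSourceSource globalMetrics) {
      this.globalMetrics = globalMetrics;
    }

    public void onLogEnqueued() {
      // was: setSizeOfLogQueue(queue.size()), which clobbered the other wal groups' contribution
      globalMetrics.incrSizeOfLogQueue(1);
    }

    public void onLogShipped() {
      globalMetrics.decrSizeOfLogQueue(1);
    }

    public int currentBacklog() {
      return globalMetrics.getSizeOfLogQueue();  // accessor added by this patch
    }
  }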


http://git-wip-us.apache.org/repos/asf/hbase/blob/8541fe4a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index 188c3a3..3aa01ab 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -36,7 +36,6 @@ public interface MetricsReplicationSourceSource {
   public static final String SOURCE_SIZE_OF_HFILE_REFS_QUEUE = 
"source.sizeOfHFileRefsQueue";
 
   void setLastShippedAge(long age);
-  void setSizeOfLogQueue(int size);
   void incrSizeOfLogQueue(int size);
   void decrSizeOfLogQueue(int size);
   void incrLogEditsFiltered(long size);
@@ -47,6 +46,7 @@ public interface MetricsReplicationSourceSource {
   void incrLogReadInEdits(long size);
   void clear();
   long getLastShippedAge();
+  int getSizeOfLogQueue();
   void incrHFilesShipped(long hfiles);
   void incrSizeOfHFileRefsQueue(long size);
   void decrSizeOfHFileRefsQueue(long size);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8541fe4a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
index 93b10b6..2526f32 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
@@ -64,10 +64,6 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 ageOfLastShippedOpGauge.set(age);
   }
 
-  @Override public void setSizeOfLogQueue(int size) {
-sizeOfLogQueueGauge.set(size);
-  }
-
   @Override public void incrSizeOfLogQueue(int size) {
 sizeOfLogQueueGauge.incr(size);
   }
@@ -121,4 +117,9 @@ public class MetricsReplicationGlobalSourceSource 
implements MetricsReplicationS
   public void decrSizeOfHFileRefsQueue(long size) {
 sizeOfHFileRefsQueueGauge.decr(size);
   }
+
+  @Override
+  public int getSizeOfLogQueue() {
+return (int)sizeOfLogQueueGauge.value();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8541fe4a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
index 9941712..03e3116 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
@@ -85,10 +85,6 @@ public class MetricsReplica

hbase git commit: HBASE-15093 Replication can report incorrect size of log queue for the global source when multiwal is enabled (Ashu Pachauri)

2016-04-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 e407e1e81 -> b7502feff


HBASE-15093 Replication can report incorrect size of log queue for the global 
source when multiwal is enabled (Ashu Pachauri)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b7502fef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b7502fef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b7502fef

Branch: refs/heads/branch-1
Commit: b7502feff3259dcfb27ce28838122fb1b240eede
Parents: e407e1e
Author: tedyu 
Authored: Mon Apr 11 08:23:34 2016 -0700
Committer: tedyu 
Committed: Mon Apr 11 08:23:34 2016 -0700

--
 .../MetricsReplicationSourceSource.java   |  2 +-
 .../MetricsReplicationGlobalSourceSource.java |  9 +
 .../MetricsReplicationSourceSourceImpl.java   |  9 +
 .../replication/regionserver/MetricsSource.java   | 18 ++
 .../regionserver/ReplicationSource.java   | 11 ---
 5 files changed, 25 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b7502fef/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index 188c3a3..3aa01ab 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -36,7 +36,6 @@ public interface MetricsReplicationSourceSource {
   public static final String SOURCE_SIZE_OF_HFILE_REFS_QUEUE = 
"source.sizeOfHFileRefsQueue";
 
   void setLastShippedAge(long age);
-  void setSizeOfLogQueue(int size);
   void incrSizeOfLogQueue(int size);
   void decrSizeOfLogQueue(int size);
   void incrLogEditsFiltered(long size);
@@ -47,6 +46,7 @@ public interface MetricsReplicationSourceSource {
   void incrLogReadInEdits(long size);
   void clear();
   long getLastShippedAge();
+  int getSizeOfLogQueue();
   void incrHFilesShipped(long hfiles);
   void incrSizeOfHFileRefsQueue(long size);
   void decrSizeOfHFileRefsQueue(long size);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b7502fef/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
index 93b10b6..2526f32 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
@@ -64,10 +64,6 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 ageOfLastShippedOpGauge.set(age);
   }
 
-  @Override public void setSizeOfLogQueue(int size) {
-sizeOfLogQueueGauge.set(size);
-  }
-
   @Override public void incrSizeOfLogQueue(int size) {
 sizeOfLogQueueGauge.incr(size);
   }
@@ -121,4 +117,9 @@ public class MetricsReplicationGlobalSourceSource 
implements MetricsReplicationS
   public void decrSizeOfHFileRefsQueue(long size) {
 sizeOfHFileRefsQueueGauge.decr(size);
   }
+
+  @Override
+  public int getSizeOfLogQueue() {
+return (int)sizeOfLogQueueGauge.value();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b7502fef/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
index 9941712..03e3116 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
@@ -85,10 +85,6 @@ public class MetricsReplic

hbase git commit: HBASE-15093 Replication can report incorrect size of log queue for the global source when multiwal is enabled (Ashu Pachauri)

2016-04-11 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 672c427ce -> 9473ab4ba


HBASE-15093 Replication can report incorrect size of log queue for the global 
source when multiwal is enabled (Ashu Pachauri)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9473ab4b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9473ab4b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9473ab4b

Branch: refs/heads/branch-1.3
Commit: 9473ab4bace61e9ca6f75fa4d0edb32d1b8d417e
Parents: 672c427
Author: tedyu 
Authored: Mon Apr 11 08:26:48 2016 -0700
Committer: tedyu 
Committed: Mon Apr 11 08:26:48 2016 -0700

--
 .../MetricsReplicationSourceSource.java   |  2 +-
 .../MetricsReplicationGlobalSourceSource.java |  9 +
 .../MetricsReplicationSourceSourceImpl.java   |  9 +
 .../replication/regionserver/MetricsSource.java   | 18 ++
 .../regionserver/ReplicationSource.java   | 11 ---
 5 files changed, 25 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9473ab4b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index 188c3a3..3aa01ab 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -36,7 +36,6 @@ public interface MetricsReplicationSourceSource {
   public static final String SOURCE_SIZE_OF_HFILE_REFS_QUEUE = 
"source.sizeOfHFileRefsQueue";
 
   void setLastShippedAge(long age);
-  void setSizeOfLogQueue(int size);
   void incrSizeOfLogQueue(int size);
   void decrSizeOfLogQueue(int size);
   void incrLogEditsFiltered(long size);
@@ -47,6 +46,7 @@ public interface MetricsReplicationSourceSource {
   void incrLogReadInEdits(long size);
   void clear();
   long getLastShippedAge();
+  int getSizeOfLogQueue();
   void incrHFilesShipped(long hfiles);
   void incrSizeOfHFileRefsQueue(long size);
   void decrSizeOfHFileRefsQueue(long size);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9473ab4b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
index 93b10b6..2526f32 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
@@ -64,10 +64,6 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 ageOfLastShippedOpGauge.set(age);
   }
 
-  @Override public void setSizeOfLogQueue(int size) {
-sizeOfLogQueueGauge.set(size);
-  }
-
   @Override public void incrSizeOfLogQueue(int size) {
 sizeOfLogQueueGauge.incr(size);
   }
@@ -121,4 +117,9 @@ public class MetricsReplicationGlobalSourceSource 
implements MetricsReplicationS
   public void decrSizeOfHFileRefsQueue(long size) {
 sizeOfHFileRefsQueueGauge.decr(size);
   }
+
+  @Override
+  public int getSizeOfLogQueue() {
+return (int)sizeOfLogQueueGauge.value();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/9473ab4b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
index 9941712..03e3116 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
@@ -85,10 +85,6 @@ public class MetricsRe

[1/6] hbase git commit: HBASE-15627 Miss space and closing quote in AccessController#checkSystemOrSuperUser (Huaxiang Sun)

2016-04-11 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/0.98 d4dcf7a11 -> 7a804030f
  refs/heads/branch-1 b7502feff -> 2dbbe8960
  refs/heads/branch-1.1 0d2c8bde9 -> da021be17
  refs/heads/branch-1.2 19d110051 -> 53b94c140
  refs/heads/branch-1.3 9473ab4ba -> a4cd35338
  refs/heads/master 8541fe4ad -> 896457339


HBASE-15627 Miss space and closing quote in 
AccessController#checkSystemOrSuperUser (Huaxiang Sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/53b94c14
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/53b94c14
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/53b94c14

Branch: refs/heads/branch-1.2
Commit: 53b94c140ab8700d2930663c2b060030a602a364
Parents: 19d1100
Author: Matteo Bertozzi 
Authored: Mon Apr 11 08:08:35 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon Apr 11 08:21:32 2016 -0700

--
 .../org/apache/hadoop/hbase/security/access/AccessController.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--
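
The one-character diff below is easier to read with the rendered messages side by side; "bob" is a made-up user name used only for illustration.

  public class MessageSketch {
    public static void main(String[] args) {
      String user = "bob";
      // before the patch: missing space and closing quote
      System.out.println("User '" + user + "is not system or super user.");
      // -> User 'bobis not system or super user.
      // after the patch:
      System.out.println("User '" + user + "' is not system or super user.");
      // -> User 'bob' is not system or super user.
    }
  }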


http://git-wip-us.apache.org/repos/asf/hbase/blob/53b94c14/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 426a5d6..cb48488 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -2468,7 +2468,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 User activeUser = getActiveUser();
 if (!Superusers.isSuperUser(activeUser)) {
   throw new AccessDeniedException("User '" + (activeUser != null ?
-activeUser.getShortName() : "null") + "is not system or super user.");
+activeUser.getShortName() : "null") + "' is not system or super 
user.");
 }
   }
 



[6/6] hbase git commit: HBASE-15627 Miss space and closing quote in AccessController#checkSystemOrSuperUser (Huaxiang Sun)

2016-04-11 Thread mbertozzi
HBASE-15627 Miss space and closing quote in 
AccessController#checkSystemOrSuperUser (Huaxiang Sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/89645733
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/89645733
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/89645733

Branch: refs/heads/master
Commit: 8964573394bcf2026bb5514c08918bd272784ed0
Parents: 8541fe4
Author: Matteo Bertozzi 
Authored: Mon Apr 11 08:08:35 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon Apr 11 08:28:26 2016 -0700

--
 .../org/apache/hadoop/hbase/security/access/AccessController.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/89645733/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 3b36ada..fb19a96 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -2464,7 +2464,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 User activeUser = getActiveUser();
 if (!Superusers.isSuperUser(activeUser)) {
   throw new AccessDeniedException("User '" + (activeUser != null ?
-activeUser.getShortName() : "null") + "is not system or super user.");
+activeUser.getShortName() : "null") + "' is not system or super 
user.");
 }
   }
 



[2/6] hbase git commit: HBASE-15627 Miss space and closing quote in AccessController#checkSystemOrSuperUser (Huaxiang Sun)

2016-04-11 Thread mbertozzi
HBASE-15627 Miss space and closing quote in 
AccessController#checkSystemOrSuperUser (Huaxiang Sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/da021be1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/da021be1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/da021be1

Branch: refs/heads/branch-1.1
Commit: da021be17158e49ff41f23e914519efd80f9a0ae
Parents: 0d2c8bd
Author: Matteo Bertozzi 
Authored: Mon Apr 11 08:08:35 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon Apr 11 08:23:11 2016 -0700

--
 .../org/apache/hadoop/hbase/security/access/AccessController.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/da021be1/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index fa175fb..8047bb8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -2457,7 +2457,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 User activeUser = getActiveUser();
 if (!Superusers.isSuperUser(activeUser)) {
   throw new AccessDeniedException("User '" + (activeUser != null ?
-activeUser.getShortName() : "null") + "is not system or super user.");
+activeUser.getShortName() : "null") + "' is not system or super 
user.");
 }
   }
 



[3/6] hbase git commit: HBASE-15627 Miss space and closing quote in AccessController#checkSystemOrSuperUser (Huaxiang Sun)

2016-04-11 Thread mbertozzi
HBASE-15627 Miss space and closing quote in 
AccessController#checkSystemOrSuperUser (Huaxiang Sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7a804030
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7a804030
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7a804030

Branch: refs/heads/0.98
Commit: 7a804030feb50a9e4ad07ad12ca45e1444d2f9e1
Parents: d4dcf7a
Author: Matteo Bertozzi 
Authored: Mon Apr 11 08:08:35 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon Apr 11 08:25:15 2016 -0700

--
 .../org/apache/hadoop/hbase/security/access/AccessController.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7a804030/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index eaf1521..1f0bffe 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -2408,7 +2408,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 User activeUser = getActiveUser();
 if (!Superusers.isSuperUser(activeUser)) {
   throw new AccessDeniedException("User '" + (activeUser != null ?
-activeUser.getShortName() : "null") + "is not system or super user.");
+activeUser.getShortName() : "null") + "' is not system or super 
user.");
 }
   }
 



[5/6] hbase git commit: HBASE-15627 Miss space and closing quote in AccessController#checkSystemOrSuperUser (Huaxiang Sun)

2016-04-11 Thread mbertozzi
HBASE-15627 Miss space and closing quote in 
AccessController#checkSystemOrSuperUser (Huaxiang Sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a4cd3533
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a4cd3533
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a4cd3533

Branch: refs/heads/branch-1.3
Commit: a4cd35338502b83e28318e3a93009159db3133d5
Parents: 9473ab4
Author: Matteo Bertozzi 
Authored: Mon Apr 11 08:08:35 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon Apr 11 08:28:17 2016 -0700

--
 .../org/apache/hadoop/hbase/security/access/AccessController.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a4cd3533/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index b0e8c8c..a147b12 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -2490,7 +2490,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 User activeUser = getActiveUser();
 if (!Superusers.isSuperUser(activeUser)) {
   throw new AccessDeniedException("User '" + (activeUser != null ?
-activeUser.getShortName() : "null") + "is not system or super user.");
+activeUser.getShortName() : "null") + "' is not system or super 
user.");
 }
   }
 



[4/6] hbase git commit: HBASE-15627 Miss space and closing quote in AccessController#checkSystemOrSuperUser (Huaxiang Sun)

2016-04-11 Thread mbertozzi
HBASE-15627 Miss space and closing quote in 
AccessController#checkSystemOrSuperUser (Huaxiang Sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2dbbe896
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2dbbe896
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2dbbe896

Branch: refs/heads/branch-1
Commit: 2dbbe8960a1e07d109dae8affe326d3aa3acca76
Parents: b7502fe
Author: Matteo Bertozzi 
Authored: Mon Apr 11 08:08:35 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon Apr 11 08:28:02 2016 -0700

--
 .../org/apache/hadoop/hbase/security/access/AccessController.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dbbe896/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index b0e8c8c..a147b12 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -2490,7 +2490,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 User activeUser = getActiveUser();
 if (!Superusers.isSuperUser(activeUser)) {
   throw new AccessDeniedException("User '" + (activeUser != null ?
-activeUser.getShortName() : "null") + "is not system or super user.");
+activeUser.getShortName() : "null") + "' is not system or super 
user.");
 }
   }
 



[16/24] hbase git commit: HBASE-15586 Unify human readable numbers in the web UI

2016-04-11 Thread syuanjiang
HBASE-15586 Unify human readable numbers in the web UI


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2dcd08bc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2dcd08bc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2dcd08bc

Branch: refs/heads/hbase-12439
Commit: 2dcd08bc3d8ab2f26da85128102926c68a95186f
Parents: a146a71
Author: Enis Soztutar 
Authored: Thu Apr 7 17:07:33 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Apr 7 17:07:33 2016 -0700

--
 .../tmpl/master/RegionServerListTmpl.jamon  | 22 ++-
 .../tmpl/regionserver/BlockCacheTmpl.jamon  | 12 
 .../tmpl/regionserver/RegionListTmpl.jamon  | 16 +++
 .../tmpl/regionserver/ServerMetricsTmpl.jamon   | 29 ++--
 4 files changed, 48 insertions(+), 31 deletions(-)
--
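
The templates below all switch to Hadoop's TraditionalBinaryPrefix helper; a minimal standalone sketch of that call (the 1536 MB sample value is made up):

  import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;

  public class HumanReadableSketch {
    public static void main(String[] args) {
      long usedHeapMB = 1536;  // hypothetical ServerLoad.getUsedHeapMB() value
      // Renders the byte count with a binary prefix and one decimal place,
      // roughly "1.5 GB", instead of the old raw "1536m" style.
      System.out.println(TraditionalBinaryPrefix.long2String(
          usedHeapMB * TraditionalBinaryPrefix.MEGA.value, "B", 1));
    }
  }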


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dcd08bc/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
index c051743..a62d5eb 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
@@ -29,6 +29,7 @@ HMaster master;
 org.apache.hadoop.hbase.ServerLoad;
 org.apache.hadoop.hbase.ServerName;
 org.apache.hadoop.hbase.util.VersionInfo;
+org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 
 
 
@@ -148,9 +149,12 @@ for (ServerName serverName: serverNames) {
 
 
 <& serverNameLink; serverName=serverName; serverLoad = sl; &>
-<% sl.getUsedHeapMB() %>m
-<% sl.getMaxHeapMB() %>m
-<% sl.getMemstoreSizeInMB() %>m
+<% TraditionalBinaryPrefix.long2String(sl.getUsedHeapMB()
+  * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
+<% TraditionalBinaryPrefix.long2String(sl.getMaxHeapMB()
+  * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
+<% TraditionalBinaryPrefix.long2String(sl.getMemstoreSizeInMB()
+  * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
 
 
 <%java>
@@ -226,10 +230,14 @@ if (sl != null) {
 <& serverNameLink; serverName=serverName; serverLoad = sl; &>
 <% sl.getStores() %>
 <% sl.getStorefiles() %>
-<% sl.getStoreUncompressedSizeMB() %>m
-<% sl.getStorefileSizeInMB() %>mb
-<% sl.getTotalStaticIndexSizeKB() %>k
-<% sl.getTotalStaticBloomSizeKB() %>k
+<% TraditionalBinaryPrefix.long2String(
+  sl.getStoreUncompressedSizeMB() * TraditionalBinaryPrefix.MEGA.value, "B", 
1) %>
+<% TraditionalBinaryPrefix.long2String(sl.getStorefileSizeInMB()
+  * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
+<% TraditionalBinaryPrefix.long2String(sl.getTotalStaticIndexSizeKB()
+  * TraditionalBinaryPrefix.KILO.value, "B", 1) %>
+<% TraditionalBinaryPrefix.long2String(sl.getTotalStaticBloomSizeKB()
+  * TraditionalBinaryPrefix.KILO.value, "B", 1) %>
 
 <%java>
 }  else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2dcd08bc/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
index 3dcd5e2..1277acc 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
@@ -45,7 +45,7 @@ org.apache.hadoop.hbase.io.hfile.bucket.BucketCacheStats;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.Bucket;
-org.apache.hadoop.util.StringUtils;
+org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 
 
 
@@ -245,12 +245,14 @@ org.apache.hadoop.util.StringUtils;
 
 
 Size
-<% 
StringUtils.humanReadableInt(cacheConfig.getBlockCache().getCurrentSize()) 
%>
+<% 
TraditionalBinaryPrefix.long2String(cacheConfig.getBlockCache().getCurrentSize(),
+"B", 1) %>
 Current size of block cache in use (bytes)
 
 
 Free
-<% 
StringUtils.humanReadableInt(cacheConfig.getBlockCache().getFreeSize()) %>
+<% 
TraditionalBinaryPrefix.long2String(cacheConfig.getBlockCache().getFreeSize(),
+"B", 1) %>
 The total free memory currently available to store more cache

[24/24] hbase git commit: HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)

2016-04-11 Thread syuanjiang
HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a395922a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a395922a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a395922a

Branch: refs/heads/hbase-12439
Commit: a395922ad5af9494bb55feee3c275c6d3a575e92
Parents: 80df1cb
Author: Vasudevan 
Authored: Mon Apr 11 14:12:07 2016 +0530
Committer: Vasudevan 
Committed: Mon Apr 11 14:12:07 2016 +0530

--
 .../apache/hadoop/hbase/HColumnDescriptor.java  |  57 +
 .../apache/hadoop/hbase/HTableDescriptor.java   |  57 +
 .../hbase/client/ConnectionImplementation.java  |   2 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   4 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java | 120 ---
 .../hadoop/hbase/protobuf/RequestConverter.java |   8 +-
 .../apache/hadoop/hbase/TableDescriptor.java|   4 +-
 .../mapreduce/TableSnapshotInputFormatImpl.java |   5 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |  12 +-
 .../procedure/AddColumnFamilyProcedure.java |   9 +-
 .../procedure/CloneSnapshotProcedure.java   |   5 +-
 .../master/procedure/CreateTableProcedure.java  |   5 +-
 .../procedure/DeleteColumnFamilyProcedure.java  |   5 +-
 .../procedure/ModifyColumnFamilyProcedure.java  |   9 +-
 .../master/procedure/ModifyTableProcedure.java  |  10 +-
 .../procedure/RestoreSnapshotProcedure.java |   5 +-
 .../procedure/TruncateTableProcedure.java   |   4 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |   4 +-
 .../hbase/snapshot/TestSnapshotManifest.java|   4 +-
 19 files changed, 162 insertions(+), 167 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a395922a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 2c10308..3c16f4e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -32,11 +32,8 @@ import org.apache.hadoop.hbase.exceptions.HBaseException;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
 import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.regionserver.BloomType;
-import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.PrettyPrinter;
 import org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
@@ -297,13 +294,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   private int cachedMaxVersions = UNINITIALIZED;
 
   /**
-   * Default constructor. Must be present for PB deserializations.
-   */
-  private HColumnDescriptor() {
-this.name = null;
-  }
-
-  /**
* Construct a column descriptor specifying only the family name
* The other attributes are defaulted.
*
@@ -1075,8 +1065,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
* @return This instance serialized with pb with pb magic prefix
* @see #parseFrom(byte[])
*/
-  public byte [] toByteArray() {
-return ProtobufUtil.prependPBMagic(convert().toByteArray());
+  public byte[] toByteArray() {
+return ProtobufUtil
+
.prependPBMagic(ProtobufUtil.convertToColumnFamilySchema(this).toByteArray());
   }
 
   /**
@@ -1096,47 +1087,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 } catch (IOException e) {
   throw new DeserializationException(e);
 }
-return convert(cfs);
-  }
-
-  /**
-   * @param cfs
-   * @return An {@link HColumnDescriptor} made from the passed in 
cfs
-   */
-  public static HColumnDescriptor convert(final ColumnFamilySchema cfs) {
-// Use the empty constructor so we preserve the initial values set on 
construction for things
-// like maxVersion.  Otherwise, we pick up wrong values on deserialization 
which makes for
-// unrelated-looking test failures that are hard to trace back to here.
-HColumnDescriptor hcd = new HColumnDescriptor();
-hcd.name = cfs.getName().toByteArray();
-for (BytesBytesPair a: cfs.getAttributesList()) {
-  hcd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
-}
-for (NameStringPair a: cfs.getConfigurationList()) {
-  hcd.setConfigu

[09/24] hbase git commit: HBASE-15587 FSTableDescriptors.getDescriptor() logs stack trace erroneously

2016-04-11 Thread syuanjiang
HBASE-15587 FSTableDescriptors.getDescriptor() logs stack trace erroneously


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c93cffb9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c93cffb9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c93cffb9

Branch: refs/heads/hbase-12439
Commit: c93cffb95c0322fc4244fe78a584ff225bc105c9
Parents: 7e39988
Author: Enis Soztutar 
Authored: Tue Apr 5 18:13:40 2016 -0700
Committer: Enis Soztutar 
Committed: Tue Apr 5 18:13:40 2016 -0700

--
 .../java/org/apache/hadoop/hbase/util/FSTableDescriptors.java| 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c93cffb9/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index cce37d7..18156cb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -125,11 +125,13 @@ public class FSTableDescriptors implements 
TableDescriptors {
 this.metaTableDescritor = TableDescriptor.metaTableDescriptor(conf);
   }
 
+  @Override
   public void setCacheOn() throws IOException {
 this.cache.clear();
 this.usecache = true;
   }
 
+  @Override
   public void setCacheOff() throws IOException {
 this.usecache = false;
 this.cache.clear();
@@ -175,6 +177,8 @@ public class FSTableDescriptors implements TableDescriptors 
{
 } catch (NullPointerException e) {
   LOG.debug("Exception during readTableDecriptor. Current table name = "
   + tablename, e);
+} catch (TableInfoMissingException e) {
+  // ignore. This is regular operation
 } catch (IOException ioe) {
   LOG.debug("Exception during readTableDecriptor. Current table name = "
   + tablename, ioe);



[01/24] hbase git commit: HBASE-15234 Don't abort ReplicationLogCleaner on ZooKeeper errors

2016-04-11 Thread syuanjiang
Repository: hbase
Updated Branches:
  refs/heads/hbase-12439 e1d5c3d26 -> a395922ad


HBASE-15234 Don't abort ReplicationLogCleaner on ZooKeeper errors


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2d8e0a04
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2d8e0a04
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2d8e0a04

Branch: refs/heads/hbase-12439
Commit: 2d8e0a0477cdfc1d13373cde54e0cd080db514f5
Parents: e1d5c3d
Author: Gary Helmling 
Authored: Tue Feb 16 14:19:19 2016 -0800
Committer: Gary Helmling 
Committed: Thu Mar 31 22:28:18 2016 -0700

--
 .../master/ReplicationLogCleaner.java   | 42 +++
 .../hbase/master/cleaner/TestLogsCleaner.java   | 75 
 2 files changed, 102 insertions(+), 15 deletions(-)
--
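
A minimal sketch (helper names are illustrative, not from the patch) of the wiring the new @VisibleForTesting overload below enables: a test can hand the cleaner its own ZooKeeperWatcher, and ZooKeeper problems now only produce warnings because the cleaner delegates to an internal warn-only Abortable instead of aborting itself.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner;
  import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;

  public class CleanerWiringSketch {
    public static ReplicationLogCleaner wire(Configuration conf, ZooKeeperWatcher zk) {
      ReplicationLogCleaner cleaner = new ReplicationLogCleaner();
      cleaner.setConf(conf, zk);  // test-only overload added by this patch
      return cleaner;
    }
  }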


http://git-wip-us.apache.org/repos/asf/hbase/blob/2d8e0a04/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
index a6b6dd8..9ecba11 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.replication.master;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -47,12 +48,11 @@ import org.apache.zookeeper.KeeperException;
  * replication before deleting it when its TTL is over.
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-public class ReplicationLogCleaner extends BaseLogCleanerDelegate implements 
Abortable {
+public class ReplicationLogCleaner extends BaseLogCleanerDelegate {
   private static final Log LOG = 
LogFactory.getLog(ReplicationLogCleaner.class);
   private ZooKeeperWatcher zkw;
   private ReplicationQueuesClient replicationQueues;
   private boolean stopped = false;
-  private boolean aborted;
 
 
   @Override
@@ -136,15 +136,23 @@ public class ReplicationLogCleaner extends 
BaseLogCleanerDelegate implements Abo
 // Make my own Configuration.  Then I'll have my own connection to zk that
 // I can close myself when comes time.
 Configuration conf = new Configuration(config);
+try {
+  setConf(conf, new ZooKeeperWatcher(conf, "replicationLogCleaner", null));
+} catch (IOException e) {
+  LOG.error("Error while configuring " + this.getClass().getName(), e);
+}
+  }
+
+  @VisibleForTesting
+  public void setConf(Configuration conf, ZooKeeperWatcher zk) {
 super.setConf(conf);
 try {
-  this.zkw = new ZooKeeperWatcher(conf, "replicationLogCleaner", null);
-  this.replicationQueues = 
ReplicationFactory.getReplicationQueuesClient(zkw, conf, this);
+  this.zkw = zk;
+  this.replicationQueues = 
ReplicationFactory.getReplicationQueuesClient(zkw, conf,
+  new WarnOnlyAbortable());
   this.replicationQueues.init();
 } catch (ReplicationException e) {
   LOG.error("Error while configuring " + this.getClass().getName(), e);
-} catch (IOException e) {
-  LOG.error("Error while configuring " + this.getClass().getName(), e);
 }
   }
 
@@ -163,15 +171,19 @@ public class ReplicationLogCleaner extends 
BaseLogCleanerDelegate implements Abo
 return this.stopped;
   }
 
-  @Override
-  public void abort(String why, Throwable e) {
-LOG.warn("Aborting ReplicationLogCleaner because " + why, e);
-this.aborted = true;
-stop(why);
-  }
+  private static class WarnOnlyAbortable implements Abortable {
 
-  @Override
-  public boolean isAborted() {
-return this.aborted;
+@Override
+public void abort(String why, Throwable e) {
+  LOG.warn("ReplicationLogCleaner received abort, ignoring.  Reason: " + 
why);
+  if (LOG.isDebugEnabled()) {
+LOG.debug(e);
+  }
+}
+
+@Override
+public boolean isAborted() {
+  return false;
+}
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2d8e0a04/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
index ebf3699..47db32b 100644
--- 
a/hbase-server/src/test

[03/24] hbase git commit: HBASE-15293 Handle TableNotFound and IllegalArgument exceptions in table.jsp (Samir Ahmic)

2016-04-11 Thread syuanjiang
HBASE-15293 Handle TableNotFound and IllegalArgument exceptions in table.jsp 
(Samir Ahmic)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5d79790c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5d79790c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5d79790c

Branch: refs/heads/hbase-12439
Commit: 5d79790c55b6caa0f9cbc77e14a5f39940b02236
Parents: 89d7501
Author: tedyu 
Authored: Fri Apr 1 02:13:18 2016 -0700
Committer: tedyu 
Committed: Fri Apr 1 02:13:18 2016 -0700

--
 .../resources/hbase-webapps/master/table.jsp| 28 ++--
 1 file changed, 26 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5d79790c/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
--
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index 8a83565..4a151e7 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -43,7 +43,8 @@
   import="org.apache.hadoop.hbase.TableName"
   import="org.apache.hadoop.hbase.HColumnDescriptor"
   import="org.apache.hadoop.hbase.client.RegionReplicaUtil"
-  import="org.apache.hadoop.hbase.HBaseConfiguration" %>
+  import="org.apache.hadoop.hbase.HBaseConfiguration"
+  import="org.apache.hadoop.hbase.TableNotFoundException"%>
 <%
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
   Configuration conf = master.getConfiguration();
@@ -126,6 +127,7 @@
 
 <%
 if ( fqtn != null ) {
+  try {
   table = (HTable) master.getConnection().getTable(TableName.valueOf(fqtn));
   if (table.getTableDescriptor().getRegionReplication() > 1) {
 tableHeader = "Table RegionsNameRegion ServerStart KeyEnd 
KeyLocalityRequestsReplicaID";
@@ -424,7 +426,29 @@ Actions:
 
 
 <% }
-} else { // handle the case for fqtn is null with error message + redirect
+  } catch(TableNotFoundException e) { %>
+  
+
+  
+Table not found
+   
+
+
+Go Back
+   <%
+  } catch(IllegalArgumentException e) { %>
+  
+
+  
+Table qualifier must not be empty
+  
+
+
+Go Back
+   <%
+  }
+}
+  else { // handle the case for fqtn is null with error message + redirect
 %>
 
 



[08/24] hbase git commit: HBASE-15505 ReplicationPeerConfig should be builder-style (Gabor Liptak)

2016-04-11 Thread syuanjiang
HBASE-15505 ReplicationPeerConfig should be builder-style (Gabor Liptak)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7e399883
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7e399883
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7e399883

Branch: refs/heads/hbase-12439
Commit: 7e399883f62fd37e5215ce3a456a917e690c921c
Parents: a93a887
Author: Enis Soztutar 
Authored: Tue Apr 5 11:44:05 2016 -0700
Committer: Enis Soztutar 
Committed: Tue Apr 5 11:44:05 2016 -0700

--
 .../client/UnmodifyableHTableDescriptor.java| 14 +++---
 .../replication/ReplicationPeerConfig.java  |  4 +-
 .../TestUnmodifyableHTableDescriptor.java   | 47 
 .../hadoop/hbase/quotas/TestQuotaFilter.java| 47 
 .../replication/TestReplicationPeerConfig.java  | 47 
 5 files changed, 151 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7e399883/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
index 7331983..59a1bd5 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
@@ -68,12 +68,12 @@ public class UnmodifyableHTableDescriptor extends 
HTableDescriptor {
* @param family HColumnDescriptor of familyto add.
*/
   @Override
-  public HTableDescriptor addFamily(final HColumnDescriptor family) {
+  public UnmodifyableHTableDescriptor addFamily(final HColumnDescriptor 
family) {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  public HTableDescriptor modifyFamily(HColumnDescriptor family) {
+  public UnmodifyableHTableDescriptor modifyFamily(HColumnDescriptor family) {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
@@ -91,7 +91,7 @@ public class UnmodifyableHTableDescriptor extends 
HTableDescriptor {
* @see org.apache.hadoop.hbase.HTableDescriptor#setReadOnly(boolean)
*/
   @Override
-  public HTableDescriptor setReadOnly(boolean readOnly) {
+  public UnmodifyableHTableDescriptor setReadOnly(boolean readOnly) {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
@@ -99,7 +99,7 @@ public class UnmodifyableHTableDescriptor extends 
HTableDescriptor {
* @see org.apache.hadoop.hbase.HTableDescriptor#setValue(byte[], byte[])
*/
   @Override
-  public HTableDescriptor setValue(byte[] key, byte[] value) {
+  public UnmodifyableHTableDescriptor setValue(byte[] key, byte[] value) {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
@@ -107,7 +107,7 @@ public class UnmodifyableHTableDescriptor extends 
HTableDescriptor {
* @see org.apache.hadoop.hbase.HTableDescriptor#setValue(java.lang.String, 
java.lang.String)
*/
   @Override
-  public HTableDescriptor setValue(String key, String value) {
+  public UnmodifyableHTableDescriptor setValue(String key, String value) {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
@@ -115,7 +115,7 @@ public class UnmodifyableHTableDescriptor extends 
HTableDescriptor {
* @see org.apache.hadoop.hbase.HTableDescriptor#setMaxFileSize(long)
*/
   @Override
-  public HTableDescriptor setMaxFileSize(long maxFileSize) {
+  public UnmodifyableHTableDescriptor setMaxFileSize(long maxFileSize) {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
@@ -123,7 +123,7 @@ public class UnmodifyableHTableDescriptor extends 
HTableDescriptor {
* @see org.apache.hadoop.hbase.HTableDescriptor#setMemStoreFlushSize(long)
*/
   @Override
-  public HTableDescriptor setMemStoreFlushSize(long memstoreFlushSize) {
+  public UnmodifyableHTableDescriptor setMemStoreFlushSize(long 
memstoreFlushSize) {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/7e399883/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index 8d05fa0..7799de6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java

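The body of the ReplicationPeerConfig hunk is cut off above; per the commit subject, its setters now return this so configuration calls can be chained. A minimal sketch of the resulting builder-style usage, assuming the existing setClusterKey and setReplicationEndpointImpl setters (the quorum string and endpoint class below are placeholders, not values from this commit):

  import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

  public class PeerConfigSketch {
    // Each setter now returns this, so the peer config can be built in one expression.
    static ReplicationPeerConfig examplePeer() {
      return new ReplicationPeerConfig()
          .setClusterKey("zk1.example.com,zk2.example.com:2181:/hbase")         // placeholder quorum
          .setReplicationEndpointImpl("com.example.CustomReplicationEndpoint"); // placeholder class
    }
  }
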
[10/24] hbase git commit: HBASE-15369 Handle NPE in region.jsp (Samir Ahmic)

2016-04-11 Thread syuanjiang
HBASE-15369 Handle NPE in region.jsp (Samir Ahmic)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3826894f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3826894f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3826894f

Branch: refs/heads/hbase-12439
Commit: 3826894f890a850270053a25b53f07a007555711
Parents: c93cffb
Author: tedyu 
Authored: Wed Apr 6 06:52:51 2016 -0700
Committer: tedyu 
Committed: Wed Apr 6 06:52:51 2016 -0700

--
 .../hbase-webapps/regionserver/region.jsp | 18 +++---
 1 file changed, 15 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3826894f/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
--
diff --git 
a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp 
b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
index 874ac43..02f3d94 100644
--- a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
@@ -21,6 +21,7 @@
   import="java.util.Collection"
   import="java.util.Date"
   import="java.util.List"
+  import="org.owasp.esapi.ESAPI"
   import="static org.apache.commons.lang.StringEscapeUtils.escapeXml"
   import="org.apache.hadoop.conf.Configuration"
   import="org.apache.hadoop.hbase.HTableDescriptor"
@@ -35,10 +36,14 @@
   String regionName = request.getParameter("name");
   HRegionServer rs = (HRegionServer) 
getServletContext().getAttribute(HRegionServer.REGIONSERVER);
   Configuration conf = rs.getConfiguration();
-
+  String displayName = null;
   Region region = rs.getFromOnlineRegions(regionName);
-  String displayName = 
HRegionInfo.getRegionNameAsStringForDisplay(region.getRegionInfo(),
+  if(region == null) {
+displayName= ESAPI.encoder().encodeForHTML(regionName) + " does not exist";
+  } else {
+displayName = 
HRegionInfo.getRegionNameAsStringForDisplay(region.getRegionInfo(),
 rs.getConfiguration());
+  }
 %>
 

[06/24] hbase git commit: HBASE-15578 Handle HBASE-15234 for ReplicationHFileCleaner

2016-04-11 Thread syuanjiang
HBASE-15578 Handle HBASE-15234 for ReplicationHFileCleaner


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/33396c36
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/33396c36
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/33396c36

Branch: refs/heads/hbase-12439
Commit: 33396c3629a83f2379a69f3a3b493ae8e6ee0a13
Parents: 79868bd
Author: Ashish Singhi 
Authored: Mon Apr 4 15:02:19 2016 +0530
Committer: Ashish Singhi 
Committed: Mon Apr 4 15:02:19 2016 +0530

--
 .../master/ReplicationHFileCleaner.java | 48 +-
 .../cleaner/TestReplicationHFileCleaner.java| 70 
 2 files changed, 100 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/33396c36/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
index 9bfea4b..5df9379 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
@@ -10,6 +10,7 @@
  */
 package org.apache.hadoop.hbase.replication.master;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
@@ -41,12 +42,11 @@ import org.apache.zookeeper.KeeperException;
  * deleting it from hfile archive directory.
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate 
implements Abortable {
+public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate {
   private static final Log LOG = 
LogFactory.getLog(ReplicationHFileCleaner.class);
   private ZooKeeperWatcher zkw;
   private ReplicationQueuesClient rqc;
   private boolean stopped = false;
-  private boolean aborted;
 
   @Override
   public Iterable<FileStatus> getDeletableFiles(Iterable<FileStatus> files) {
@@ -129,18 +129,27 @@ public class ReplicationHFileCleaner extends 
BaseHFileCleanerDelegate implements
 // Make my own Configuration. Then I'll have my own connection to zk that
 // I can close myself when time comes.
 Configuration conf = new Configuration(config);
+try {
+  setConf(conf, new ZooKeeperWatcher(conf, "replicationHFileCleaner", 
null));
+} catch (IOException e) {
+  LOG.error("Error while configuring " + this.getClass().getName(), e);
+}
+  }
+
+  @VisibleForTesting
+  public void setConf(Configuration conf, ZooKeeperWatcher zk) {
 super.setConf(conf);
 try {
-  initReplicationQueuesClient(conf);
+  initReplicationQueuesClient(conf, zk);
 } catch (IOException e) {
   LOG.error("Error while configuring " + this.getClass().getName(), e);
 }
   }
 
-  private void initReplicationQueuesClient(Configuration conf)
+  private void initReplicationQueuesClient(Configuration conf, 
ZooKeeperWatcher zk)
   throws ZooKeeperConnectionException, IOException {
-this.zkw = new ZooKeeperWatcher(conf, "replicationHFileCleaner", null);
-this.rqc = ReplicationFactory.getReplicationQueuesClient(zkw, conf, this);
+this.zkw = zk;
+this.rqc = ReplicationFactory.getReplicationQueuesClient(zkw, conf, new 
WarnOnlyAbortable());
   }
 
   @Override
@@ -161,18 +170,6 @@ public class ReplicationHFileCleaner extends 
BaseHFileCleanerDelegate implements
   }
 
   @Override
-  public void abort(String why, Throwable e) {
-LOG.warn("Aborting ReplicationHFileCleaner because " + why, e);
-this.aborted = true;
-stop(why);
-  }
-
-  @Override
-  public boolean isAborted() {
-return this.aborted;
-  }
-
-  @Override
   public boolean isFileDeletable(FileStatus fStat) {
  Set<String> hfileRefsFromQueue;
 // all members of this class are null if replication is disabled,
@@ -190,4 +187,19 @@ public class ReplicationHFileCleaner extends 
BaseHFileCleanerDelegate implements
 }
 return !hfileRefsFromQueue.contains(fStat.getPath().getName());
   }
+
+  private static class WarnOnlyAbortable implements Abortable {
+@Override
+public void abort(String why, Throwable e) {
+  LOG.warn("ReplicationHFileCleaner received abort, ignoring.  Reason: " + 
why);
+  if (LOG.isDebugEnabled()) {
+LOG.debug(e);
+  }
+}
+
+@Override
+public boolean isAborted() {
+  return false;
+}
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/33396c36/hbase-server/src

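The refactor above lets a test hand ReplicationHFileCleaner an existing ZooKeeperWatcher instead of the cleaner opening its own connection, with aborts downgraded to warnings by the internal WarnOnlyAbortable. A rough sketch of using the new @VisibleForTesting overload (the watcher identifier is illustrative, and the caller owns the watcher's lifecycle):

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.replication.master.ReplicationHFileCleaner;
  import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;

  public class ReplicationHFileCleanerSketch {
    static ReplicationHFileCleaner newCleanerWithSharedZk(Configuration conf,
        ZooKeeperWatcher sharedZk) {
      ReplicationHFileCleaner cleaner = new ReplicationHFileCleaner();
      // Injects the shared watcher; setConf(Configuration) alone would create a private one.
      cleaner.setConf(conf, sharedZk);
      return cleaner;
    }

    public static void main(String[] args) throws IOException {
      Configuration conf = HBaseConfiguration.create();
      ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "replicationHFileCleanerSketch", null);
      newCleanerWithSharedZk(conf, zkw);
    }
  }
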
[14/24] hbase git commit: HBASE-15400 Use DateTieredCompactor for Date Tiered Compaction (Clara Xiong)

2016-04-11 Thread syuanjiang
HBASE-15400 Use DateTieredCompactor for Date Tiered Compaction (Clara Xiong)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f60fc9d1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f60fc9d1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f60fc9d1

Branch: refs/heads/hbase-12439
Commit: f60fc9d1a0625970aa2fd14d29e4c1266f9571b7
Parents: d393603
Author: tedyu 
Authored: Thu Apr 7 14:58:59 2016 -0700
Committer: tedyu 
Committed: Thu Apr 7 14:58:59 2016 -0700

--
 .../regionserver/DateTieredStoreEngine.java | 102 ++
 .../hadoop/hbase/regionserver/HStore.java   |   2 +-
 .../hadoop/hbase/regionserver/StoreFile.java|  34 +-
 .../compactions/CompactionConfiguration.java|  10 +
 .../compactions/CompactionPolicy.java   |   2 +-
 .../compactions/CompactionRequest.java  |  16 +-
 .../compactions/DateTieredCompactionPolicy.java | 358 +--
 .../DateTieredCompactionRequest.java|  44 +++
 .../compactions/ExploringCompactionPolicy.java  |   4 +-
 .../compactions/FIFOCompactionPolicy.java   |   5 +-
 .../compactions/RatioBasedCompactionPolicy.java | 318 
 .../compactions/SortedCompactionPolicy.java | 239 +
 .../compactions/StripeCompactionPolicy.java |   3 +-
 .../hbase/regionserver/MockStoreFile.java   |  40 ++-
 .../TestDateTieredCompactionPolicy.java | 325 +
 .../compactions/EverythingPolicy.java   |   2 +-
 16 files changed, 1102 insertions(+), 402 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f60fc9d1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
new file mode 100644
index 000..773baab
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+import 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionPolicy;
+import 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionRequest;
+import org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactor;
+import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
+import org.apache.hadoop.hbase.security.User;
+
+/**
+ * HBASE-15400 This store engine allows us to store data in date tiered layout 
with exponential
+ * sizing so that the more recent data has more granularity. Time-range scan 
will perform the
+ * best with most recent data. When data reach maxAge, they are compacted in 
fixed-size time
+ * windows for TTL and archiving. Please refer to design spec for more details.
+ * 
https://docs.google.com/document/d/1_AmlNb2N8Us1xICsTeGDLKIqL6T-oHoRLZ323MG_uy8/edit#heading=h.uk6y5pd3oqgx
+ */
+@InterfaceAudience.Private
+public class DateTieredStoreEngine extends StoreEngine {
+  @Override
+  public boolean needsCompaction(List<StoreFile> filesCompacting) {
+return compactionPolicy.needsCompaction(storeFileManager.getStorefiles(),
+  filesCompacting);
+  }
+
+  @Override
+  public CompactionContext createCompaction() throws IOException {
+return new DateTieredCompactionContext();
+  }
+
+  @Override
+  protected void createComponents(Configuration conf, Store store, 
CellComparator kvComparator)
+ 

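The class comment above covers the layout; the knobs involved appear in the accompanying TestDateTieredCompactionPolicy (see [13/24] below). A minimal sketch of enabling the engine through Configuration, mirroring that test; the numeric values are the test's settings, not tuning recommendations:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;

  public class DateTieredConfigSketch {
    static Configuration dateTieredConf() {
      Configuration conf = HBaseConfiguration.create();
      // StoreEngine.STORE_ENGINE_CLASS_KEY: switch stores to the date-tiered engine.
      conf.set("hbase.hstore.engine.class",
          "org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine");
      // Window/tier shaping, mirroring the unit test's configuration.
      conf.setLong(CompactionConfiguration.MAX_AGE_MILLIS_KEY, 100);
      conf.setLong(CompactionConfiguration.BASE_WINDOW_MILLIS_KEY, 6);
      conf.setInt(CompactionConfiguration.WINDOWS_PER_TIER_KEY, 4);
      conf.setLong(CompactionConfiguration.INCOMING_WINDOW_MIN_KEY, 3);
      return conf;
    }
  }
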
[15/24] hbase git commit: HBASE-15400 Use DateTieredCompactor for Date Tiered Compaction - drop TestDateTieredCompaction.java

2016-04-11 Thread syuanjiang
HBASE-15400 Use DateTieredCompactor for Date Tiered Compaction - drop 
TestDateTieredCompaction.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a146a71a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a146a71a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a146a71a

Branch: refs/heads/hbase-12439
Commit: a146a71a332fdd58f9bf4e748861f5b050a5f22f
Parents: f60fc9d
Author: tedyu 
Authored: Thu Apr 7 15:01:00 2016 -0700
Committer: tedyu 
Committed: Thu Apr 7 15:01:00 2016 -0700

--
 .../regionserver/TestDateTieredCompaction.java  | 211 ---
 1 file changed, 211 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a146a71a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompaction.java
deleted file mode 100644
index cfb54b7..000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompaction.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
-import 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionPolicy;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category(SmallTests.class)
-public class TestDateTieredCompaction extends TestCompactionPolicy {
-  ArrayList sfCreate(long[] minTimestamps, long[] maxTimestamps, 
long[] sizes)
-  throws IOException {
-ArrayList ageInDisk = new ArrayList();
-for (int i = 0; i < sizes.length; i++) {
-  ageInDisk.add(0L);
-}
-
-ArrayList ret = Lists.newArrayList();
-for (int i = 0; i < sizes.length; i++) {
-  MockStoreFile msf =
-  new MockStoreFile(TEST_UTIL, TEST_FILE, sizes[i], ageInDisk.get(i), 
false, i);
-  msf.setTimeRangeTracker(new TimeRangeTracker(minTimestamps[i], 
maxTimestamps[i]));
-  ret.add(msf);
-}
-return ret;
-  }
-
-  @Override
-  protected void config() {
-super.config();
-
-// Set up policy
-conf.setLong(CompactionConfiguration.MAX_AGE_MILLIS_KEY, 100);
-conf.setLong(CompactionConfiguration.INCOMING_WINDOW_MIN_KEY, 3);
-conf.setLong(CompactionConfiguration.BASE_WINDOW_MILLIS_KEY, 6);
-conf.setInt(CompactionConfiguration.WINDOWS_PER_TIER_KEY, 4);
-conf.set(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY,
-  DateTieredCompactionPolicy.class.getName());
-
-// Special settings for compaction policy per window
-this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_KEY, 
2);
-this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, 
12);
-
this.conf.setFloat(CompactionConfiguration.HBASE_HSTORE_COMPACTION_RATIO_KEY, 
1.2F);
-  }
-
-  void compactEquals(long now, ArrayList candidates, long... 
expected)
-  throws IOException {
-Assert.assertTrue(((DateTieredCompactionPolicy) 
store.storeEngine.getCompactionPolicy())
-.needsCompaction(candidates, ImmutableList. of(), now));
-
-List actual =
-((DateTieredCompactionPolicy) store.storeEngine.getCompactionPolicy())
-.applyCompactionPolicy(candidates, false, false, now);
-
-Assert.assertEquals(Arrays.toString(expected), 
Arrays.toString(getSizes(actual)));
-  }
-
-  /**
-   * Test for incoming window
-   * @throws IOException with error
-   */
-  @Test
-  public void incomingWindow(

[20/24] hbase git commit: HBASE-15507 Online modification of enabled ReplicationPeerConfig (Geoffrey Jacoby)

2016-04-11 Thread syuanjiang
HBASE-15507 Online modification of enabled ReplicationPeerConfig (Geoffrey 
Jacoby)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e0f31ba6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e0f31ba6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e0f31ba6

Branch: refs/heads/hbase-12439
Commit: e0f31ba6e68416db359d31eee21750cd7559909f
Parents: 6ea4994
Author: tedyu 
Authored: Fri Apr 8 21:26:31 2016 -0700
Committer: tedyu 
Committed: Fri Apr 8 21:26:31 2016 -0700

--
 .../client/replication/ReplicationAdmin.java| 12 +++-
 .../hbase/replication/ReplicationPeer.java  |  2 +
 .../ReplicationPeerConfigListener.java  | 33 +++
 .../replication/ReplicationPeerZKImpl.java  | 26 -
 .../hbase/replication/ReplicationPeers.java |  2 +
 .../replication/ReplicationPeersZKImpl.java | 39 +
 .../replication/BaseReplicationEndpoint.java| 22 
 .../hbase/replication/ReplicationEndpoint.java  |  7 +--
 .../regionserver/ReplicationSourceManager.java  |  2 +-
 .../VisibilityReplicationEndpoint.java  |  6 ++
 .../TestReplicationAdminWithClusters.java   | 58 
 .../src/main/ruby/hbase/replication_admin.rb| 29 +-
 hbase-shell/src/main/ruby/shell.rb  |  1 +
 .../ruby/shell/commands/update_peer_config.rb   | 49 +
 .../test/ruby/hbase/replication_admin_test.rb   | 25 +
 15 files changed, 301 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e0f31ba6/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
index 8ee3a22..a2ad2e7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
@@ -47,7 +47,6 @@ import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 import org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationFactory;
 import org.apache.hadoop.hbase.replication.ReplicationPeer;
@@ -201,7 +200,11 @@ public class ReplicationAdmin implements Closeable {
   public static Map> parseTableCFsFromConfig(String 
tableCFsConfig) {
 return ReplicationSerDeHelper.parseTableCFsFromConfig(tableCFsConfig);
   }
-  
+
+  public void updatePeerConfig(String id, ReplicationPeerConfig peerConfig)
+  throws ReplicationException {
+this.replicationPeers.updatePeerConfig(id, peerConfig);
+  }
   /**
* Removes a peer cluster and stops the replication to it.
* @param id a short name that identifies the cluster
@@ -550,6 +553,11 @@ public class ReplicationAdmin implements Closeable {
   }
 
   @VisibleForTesting
+  public void peerAdded(String id) throws ReplicationException {
+this.replicationPeers.peerAdded(id);
+  }
+
+  @VisibleForTesting
   List listReplicationPeers() {
 Map peers = listPeerConfigs();
 if (peers == null || peers.size() <= 0) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e0f31ba6/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
index 920eea6..3da01fe 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
@@ -71,4 +71,6 @@ public interface ReplicationPeer {
*/
   public Map> getTableCFs();
 
+  void trackPeerConfigChanges(ReplicationPeerConfigListener listener);
+
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/e0f31ba6/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfigListener.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfigListener.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfigListener.java
new file mode 100644
index 000..4e04186
--- /dev/null

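The new ReplicationAdmin#updatePeerConfig(String, ReplicationPeerConfig) above, together with the update_peer_config shell command added in this commit, allows changing an enabled peer's configuration without removing the peer. A rough sketch of the Java call; the peer id and the custom key/value are illustrative:

  import java.io.IOException;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
  import org.apache.hadoop.hbase.replication.ReplicationException;
  import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

  public class UpdatePeerConfigSketch {
    public static void main(String[] args) throws IOException, ReplicationException {
      ReplicationAdmin admin = new ReplicationAdmin(HBaseConfiguration.create());
      try {
        ReplicationPeerConfig peerConfig = admin.getPeerConfig("1");    // id of an existing peer
        peerConfig.getConfiguration().put("some.custom.key", "value");  // illustrative entry
        // Persists the change to ZK; listeners on region servers are
        // notified for enabled peers.
        admin.updatePeerConfig("1", peerConfig);
      } finally {
        admin.close();
      }
    }
  }
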
[12/24] hbase git commit: HBASE-15606 Limit creating zk connection in HBaseAdmin#getCompactionState() only to case when 'hbase:meta' is checked.

2016-04-11 Thread syuanjiang
HBASE-15606 Limit creating zk connection in HBaseAdmin#getCompactionState() 
only to case when 'hbase:meta' is checked.

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d393603d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d393603d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d393603d

Branch: refs/heads/hbase-12439
Commit: d393603dea23306cd3f18f6dbd1cf14561d45bd0
Parents: ac8cd37
Author: Samir Ahmic 
Authored: Thu Apr 7 21:52:51 2016 +0200
Committer: stack 
Committed: Thu Apr 7 14:25:49 2016 -0700

--
 .../java/org/apache/hadoop/hbase/client/HBaseAdmin.java   | 10 ++
 1 file changed, 6 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d393603d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index aea86b9..a900abd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -3017,12 +3017,12 @@ public class HBaseAdmin implements Admin {
 break;
   case NORMAL:
   default:
-ZooKeeperWatcher zookeeper =
-new ZooKeeperWatcher(conf, ZK_IDENTIFIER_PREFIX + 
connection.toString(),
-new ThrowableAbortable());
+ZooKeeperWatcher zookeeper = null;
 try {
   List<Pair<HRegionInfo, ServerName>> pairs;
   if (TableName.META_TABLE_NAME.equals(tableName)) {
+zookeeper = new ZooKeeperWatcher(conf, ZK_IDENTIFIER_PREFIX + 
connection.toString(),
+  new ThrowableAbortable());
 pairs = new 
MetaTableLocator().getMetaRegionsAndLocations(zookeeper);
   } else {
 pairs = MetaTableAccessor.getTableRegionsAndLocations(connection, 
tableName);
@@ -3074,7 +3074,9 @@ public class HBaseAdmin implements Admin {
 } catch (ServiceException se) {
   throw ProtobufUtil.getRemoteException(se);
 } finally {
-  zookeeper.close();
+  if (zookeeper != null) {
+zookeeper.close();
+  }
 }
 break;
 }



[05/24] hbase git commit: HBASE-15582 SnapshotManifestV1 too verbose when there are no regions

2016-04-11 Thread syuanjiang
HBASE-15582 SnapshotManifestV1 too verbose when there are no regions


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/79868bd3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/79868bd3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/79868bd3

Branch: refs/heads/hbase-12439
Commit: 79868bd394c0fd6743d6582aa4713f91e63a8baf
Parents: 25419d8
Author: Matteo Bertozzi 
Authored: Fri Apr 1 20:55:21 2016 -0700
Committer: Matteo Bertozzi 
Committed: Fri Apr 1 20:55:21 2016 -0700

--
 .../java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/79868bd3/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
index 328c998..a5afb91 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
@@ -116,7 +116,7 @@ public final class SnapshotManifestV1 {
   final SnapshotDescription desc) throws IOException {
 FileStatus[] regions = FSUtils.listStatus(fs, snapshotDir, new 
FSUtils.RegionDirFilter(fs));
 if (regions == null) {
-  LOG.info("No regions under directory:" + snapshotDir);
+  LOG.debug("No regions under directory:" + snapshotDir);
   return null;
 }
 



[19/24] hbase git commit: HBASE-15407 Add SASL support for fan out OutputStream

2016-04-11 Thread syuanjiang
HBASE-15407 Add SASL support for fan out OutputStream


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6ea49945
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6ea49945
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6ea49945

Branch: refs/heads/hbase-12439
Commit: 6ea4994569e05ff44e0fa571e053cef828ab57ed
Parents: e450d94
Author: zhangduo 
Authored: Sun Mar 27 19:01:05 2016 +0800
Committer: zhangduo 
Committed: Fri Apr 8 21:46:47 2016 +0800

--
 .../util/FanOutOneBlockAsyncDFSOutput.java  |   38 +-
 .../FanOutOneBlockAsyncDFSOutputHelper.java |  230 ++--
 .../FanOutOneBlockAsyncDFSOutputSaslHelper.java | 1032 ++
 .../util/TestFanOutOneBlockAsyncDFSOutput.java  |   13 +-
 .../TestSaslFanOutOneBlockAsyncDFSOutput.java   |  192 
 5 files changed, 1385 insertions(+), 120 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6ea49945/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
index b10f180..bdbf865 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
@@ -17,11 +17,26 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import static io.netty.handler.timeout.IdleState.READER_IDLE;
+import static io.netty.handler.timeout.IdleState.WRITER_IDLE;
 import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.HEART_BEAT_SEQNO;
 import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.completeFile;
 import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.endFileLease;
 import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.EventLoop;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.protobuf.ProtobufDecoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import io.netty.handler.timeout.IdleStateEvent;
+import io.netty.handler.timeout.IdleStateHandler;
+import io.netty.util.concurrent.Future;
+import io.netty.util.concurrent.FutureListener;
+import io.netty.util.concurrent.Promise;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -36,6 +51,8 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.base.Supplier;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -52,23 +69,6 @@ import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.util.DataChecksum;
 
-import com.google.common.base.Supplier;
-
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.ByteBufAllocator;
-import io.netty.channel.Channel;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.EventLoop;
-import io.netty.channel.SimpleChannelInboundHandler;
-import io.netty.handler.codec.protobuf.ProtobufDecoder;
-import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-import io.netty.handler.timeout.IdleState;
-import io.netty.handler.timeout.IdleStateEvent;
-import io.netty.handler.timeout.IdleStateHandler;
-import io.netty.util.concurrent.Future;
-import io.netty.util.concurrent.FutureListener;
-import io.netty.util.concurrent.Promise;
-
 /**
  * An asynchronous HDFS output stream implementation which fans out data to 
datanode and only
  * supports writing file with only one block.
@@ -278,7 +278,7 @@ public class FanOutOneBlockAsyncDFSOutput implements 
Closeable {
   public void userEventTriggered(ChannelHandlerContext ctx, Object evt) 
throws Exception {
 if (evt instanceof IdleStateEvent) {
   IdleStateEvent e = (IdleStateEvent) evt;
-  if (e.state() == IdleState.READER_IDLE) {
+  if (e.state() == READER_IDLE) {
 failed(ctx.channel(), new Supplier() {
 
   @Override
@@ -286,7 +286,7 @@ public class FanOutOneBlockAsyncDFSOutput implements 
Closeable {
 return new IOException("Timeout(" + timeoutMs + "ms) waiting 
for response"

[11/24] hbase git commit: HBASE-15592 Print Procedure WAL content

2016-04-11 Thread syuanjiang
HBASE-15592 Print Procedure WAL content


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ac8cd373
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ac8cd373
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ac8cd373

Branch: refs/heads/hbase-12439
Commit: ac8cd373ebe81ed24cab6737154c6902c05ff059
Parents: 3826894
Author: Jerry He 
Authored: Wed Apr 6 21:42:38 2016 -0700
Committer: Jerry He 
Committed: Wed Apr 6 21:49:07 2016 -0700

--
 .../hadoop/hbase/procedure2/Procedure.java  |  33 
 .../store/wal/ProcedureWALPrettyPrinter.java| 189 +++
 2 files changed, 222 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ac8cd373/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index aff2b15..781bad9 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -205,6 +205,16 @@ public abstract class Procedure implements 
Comparable {
 
   @Override
   public String toString() {
+// Return the simple String presentation of the procedure.
+return toStringSimpleSB().toString();
+  }
+
+  /**
+   * Build the StringBuilder for the simple form of
+   * procedure string.
+   * @return the StringBuilder
+   */
+  protected StringBuilder toStringSimpleSB() {
 StringBuilder sb = new StringBuilder();
 toStringClassDetails(sb);
 
@@ -225,6 +235,29 @@ public abstract class Procedure implements 
Comparable {
 
 sb.append(" state=");
 toStringState(sb);
+
+return sb;
+  }
+
+  /**
+   * Extend the toString() information with more procedure
+   * details
+   */
+  public String toStringDetails() {
+StringBuilder sb = toStringSimpleSB();
+
+sb.append(" startTime=");
+sb.append(getStartTime());
+
+sb.append(" lastUpdate=");
+sb.append(getLastUpdate());
+
+if (stackIndexes != null) {
+  sb.append("\n");
+  sb.append("stackIndexes=");
+  sb.append(Arrays.toString(getStackIndexes()));
+}
+
 return sb.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/ac8cd373/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
new file mode 100644
index 000..9c33ac2
--- /dev/null
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.procedure2.store.wal;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import 
org

[13/24] hbase git commit: HBASE-15400 Use DateTieredCompactor for Date Tiered Compaction (Clara Xiong)

2016-04-11 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/f60fc9d1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompactionPolicy.java
new file mode 100644
index 000..ecccbdd
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDateTieredCompactionPolicy.java
@@ -0,0 +1,325 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HConstants;
+import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
+import 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionPolicy;
+import 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionRequest;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestDateTieredCompactionPolicy extends TestCompactionPolicy {
+  ArrayList sfCreate(long[] minTimestamps, long[] maxTimestamps, 
long[] sizes)
+  throws IOException {
+ManualEnvironmentEdge timeMachine = new ManualEnvironmentEdge();
+EnvironmentEdgeManager.injectEdge(timeMachine);
+// Has to be  > 0 and < now.
+timeMachine.setValue(1);
+ArrayList ageInDisk = new ArrayList();
+for (int i = 0; i < sizes.length; i++) {
+  ageInDisk.add(0L);
+}
+
+ArrayList ret = Lists.newArrayList();
+for (int i = 0; i < sizes.length; i++) {
+  MockStoreFile msf =
+  new MockStoreFile(TEST_UTIL, TEST_FILE, sizes[i], ageInDisk.get(i), 
false, i);
+  msf.setTimeRangeTracker(new TimeRangeTracker(minTimestamps[i], 
maxTimestamps[i]));
+  ret.add(msf);
+}
+return ret;
+  }
+
+  @Override
+  protected void config() {
+super.config();
+
+// Set up policy
+conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY,
+  "org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine");
+conf.setLong(CompactionConfiguration.MAX_AGE_MILLIS_KEY, 100);
+conf.setLong(CompactionConfiguration.INCOMING_WINDOW_MIN_KEY, 3);
+conf.setLong(CompactionConfiguration.BASE_WINDOW_MILLIS_KEY, 6);
+conf.setInt(CompactionConfiguration.WINDOWS_PER_TIER_KEY, 4);
+
conf.setBoolean(CompactionConfiguration.SINGLE_OUTPUT_FOR_MINOR_COMPACTION_KEY, 
false);
+
+// Special settings for compaction policy per window
+this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_KEY, 
2);
+this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, 
12);
+
this.conf.setFloat(CompactionConfiguration.HBASE_HSTORE_COMPACTION_RATIO_KEY, 
1.2F);
+
+conf.setInt(HStore.BLOCKING_STOREFILES_KEY, 20);
+conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 10);
+  }
+
+  void compactEquals(long now, ArrayList candidates, long[] 
expectedFileSizes,
+  long[] expectedBoundaries, boolean isMajor, boolean toCompact) throws 
IOException {
+ManualEnvironmentEdge timeMachine = new ManualEnvironmentEdge();
+EnvironmentEdgeManager.injectEdge(timeMachine);
+timeMachine.setValue(now);
+DateTieredCompactionRequest request;
+if (isMajor) {
+  for (StoreFile file : candidates) {
+((MockStoreFile)file).setIsMajor(true);
+  }
+  Assert.assertEquals(toCompact, ((DateTieredCompactionPolicy) 
store.storeEngine.getCompactionPolicy())
+.shouldPerformMajorCompaction(candidates));
+  request = (DateTieredCompactionRequest) ((DateTieredCompactionPolicy) 
store.storeEngine
+  .getCompactionPolicy()).selectMajorCompaction(c

[18/24] hbase git commit: HBASE-15380 Purge rollback support in Store etc.

2016-04-11 Thread syuanjiang
HBASE-15380 Purge rollback support in Store etc.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e450d94a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e450d94a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e450d94a

Branch: refs/heads/hbase-12439
Commit: e450d94a2c8655330ff1c80a757b39319d9f26d1
Parents: 394b89d
Author: stack 
Authored: Thu Apr 7 22:35:23 2016 -0700
Committer: stack 
Committed: Thu Apr 7 22:35:23 2016 -0700

--
 .../hbase/regionserver/AbstractMemStore.java   | 17 -
 .../apache/hadoop/hbase/regionserver/HStore.java   | 16 
 .../apache/hadoop/hbase/regionserver/MemStore.java |  7 ---
 .../apache/hadoop/hbase/regionserver/Store.java|  8 
 4 files changed, 4 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e450d94a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
index 08e3f5e..7f1a6bb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
@@ -226,23 +226,6 @@ public abstract class AbstractMemStore implements MemStore 
{
 return getSnapshot().getSize();
   }
 
-  /**
-   * Remove n key from the memstore. Only cells that have the same key and the
-   * same memstoreTS are removed.  It is ok to not update timeRangeTracker
-   * in this call. It is possible that we can optimize this method by using
-   * tailMap/iterator, but since this method is called rarely (only for
-   * error recovery), we can leave those optimization for the future.
-   * @param cell
-   */
-  @Override
-  public void rollback(Cell cell) {
-// If the key is in the active, delete it. Update this.size.
-long sz = active.rollback(cell);
-if (sz != 0) {
-  setOldestEditTimeToNow();
-}
-  }
-
   @Override
   public String toString() {
 StringBuffer buf = new StringBuffer();

http://git-wip-us.apache.org/repos/asf/hbase/blob/e450d94a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 9524c5b..f7ad4ae 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -646,16 +646,6 @@ public class HStore implements Store {
 }
   }
 
-  @Override
-  public void rollback(final Cell cell) {
-lock.readLock().lock();
-try {
-  this.memstore.rollback(cell);
-} finally {
-  lock.readLock().unlock();
-}
-  }
-
   /**
* @return All store files.
*/
@@ -1817,8 +1807,10 @@ public class HStore implements Store {
 try {
   // Not split-able if we find a reference store file present in the store.
   boolean result = !hasReferences();
-  if (!result && LOG.isDebugEnabled()) {
-LOG.debug("Cannot split region due to reference files being there");
+  if (!result) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Not splittable; has references: " + this);
+  }
   }
   return result;
 } finally {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e450d94a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
index 6bb7081..ea72b7f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
@@ -87,13 +87,6 @@ public interface MemStore extends HeapSize {
   long timeOfOldestEdit();
 
   /**
-   * Remove n key from the memstore. Only kvs that have the same key and the 
same memstoreTS are
-   * removed. It is ok to not update timeRangeTracker in this call.
-   * @param cell
-   */
-  void rollback(final Cell cell);
-
-  /**
* Write a delete
* @param deleteCell
* @return approximate size of the passed key and value.

http://git-wip-us.apache.org/repos/asf/hbase/blob/e450d94a/hbase-server/src/main/java/org/apache/hadoop/hbase/reg

[02/24] hbase git commit: HBASE-15568 Procedure V2 - Remove CreateTableHandler in HBase Apache 2.0 release (Stephen Yuan Jiang)

2016-04-11 Thread syuanjiang
HBASE-15568 Procedure V2 - Remove CreateTableHandler in HBase Apache 2.0 
release (Stephen Yuan Jiang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/89d75016
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/89d75016
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/89d75016

Branch: refs/heads/hbase-12439
Commit: 89d750166d32f1505c5278fd07d4fb575d6caa3d
Parents: 2d8e0a0
Author: Stephen Yuan Jiang 
Authored: Fri Apr 1 01:31:09 2016 -0700
Committer: Stephen Yuan Jiang 
Committed: Fri Apr 1 01:31:09 2016 -0700

--
 .../master/handler/CreateTableHandler.java  | 311 ---
 .../master/handler/TestCreateTableHandler.java  | 178 ---
 2 files changed, 489 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/89d75016/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java
deleted file mode 100644
index b884544..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java
+++ /dev/null
@@ -1,311 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.master.handler;
-
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CoordinatedStateException;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
-import org.apache.hadoop.hbase.Server;
-import org.apache.hadoop.hbase.TableDescriptor;
-import org.apache.hadoop.hbase.TableExistsException;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-import org.apache.hadoop.hbase.client.TableState;
-import org.apache.hadoop.hbase.executor.EventHandler;
-import org.apache.hadoop.hbase.executor.EventType;
-import org.apache.hadoop.hbase.ipc.RpcServer;
-import org.apache.hadoop.hbase.master.AssignmentManager;
-import org.apache.hadoop.hbase.master.HMaster;
-import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-import org.apache.hadoop.hbase.master.MasterFileSystem;
-import org.apache.hadoop.hbase.master.MasterServices;
-import org.apache.hadoop.hbase.master.TableLockManager;
-import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
-import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.util.FSTableDescriptors;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.util.ModifyRegionUtils;
-import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-
-/**
- * Handler to create a table.
- */
-@InterfaceAudience.Private
-public class CreateTableHandler extends EventHandler {
-  private static final Log LOG = LogFactory.getLog(CreateTableHandler.class);
-  protected final MasterFileSystem fileSystemManager;
-  protected final HTableDescriptor hTableDescriptor;
-  protected final Configuration conf;
-  private final AssignmentManager assignmentManager;
-  private final TableLockManager tableLockManager;
-  private final HRegionInfo [] newRegions;
-  private final MasterServices masterServices;
-  private final TableLock tableLock;
-  private User activeUser;
-
-  public CreateTableHandler(Server server, MasterFileSystem fileSystemManager,
-  

[04/24] hbase git commit: HBASE-15424 Add bulk load hfile-refs for replication in ZK after the event is appended in the WAL

2016-04-11 Thread syuanjiang
HBASE-15424 Add bulk load hfile-refs for replication in ZK after the event is 
appended in the WAL


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/25419d8b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/25419d8b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/25419d8b

Branch: refs/heads/hbase-12439
Commit: 25419d8b18dd8f35a102614cd31b274659f747ef
Parents: 5d79790
Author: Ashish Singhi 
Authored: Fri Apr 1 15:40:36 2016 +0530
Committer: Ashish Singhi 
Committed: Fri Apr 1 15:40:36 2016 +0530

--
 .../hbase/regionserver/wal/AbstractFSWAL.java   |  4 +-
 .../hbase/regionserver/wal/MetricsWAL.java  |  7 ++-
 .../regionserver/wal/WALActionsListener.java| 10 +++-
 .../replication/regionserver/Replication.java   | 50 
 .../hadoop/hbase/wal/DisabledWALProvider.java   |  7 +--
 .../hbase/regionserver/wal/TestMetricsWAL.java  | 10 ++--
 .../hbase/wal/WALPerformanceEvaluation.java |  3 +-
 7 files changed, 58 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/25419d8b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
index f189ff1..b89488a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
@@ -840,14 +840,14 @@ public abstract class AbstractFSWAL implements WAL {
 return true;
   }
 
-  private long postAppend(final Entry e, final long elapsedTime) {
+  private long postAppend(final Entry e, final long elapsedTime) throws 
IOException {
 long len = 0;
 if (!listeners.isEmpty()) {
   for (Cell cell : e.getEdit().getCells()) {
 len += CellUtil.estimatedSerializedSizeOf(cell);
   }
   for (WALActionsListener listener : listeners) {
-listener.postAppend(len, elapsedTime);
+listener.postAppend(len, elapsedTime, e.getKey(), e.getEdit());
   }
 }
 return len;

http://git-wip-us.apache.org/repos/asf/hbase/blob/25419d8b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
index 99792e5..69a31cd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
@@ -20,9 +20,13 @@
 package org.apache.hadoop.hbase.regionserver.wal;
 
 import com.google.common.annotations.VisibleForTesting;
+
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.util.StringUtils;
 
@@ -51,7 +55,8 @@ public class MetricsWAL extends WALActionsListener.Base {
   }
 
   @Override
-  public void postAppend(final long size, final long time) {
+  public void postAppend(final long size, final long time, final WALKey logkey,
+  final WALEdit logEdit) throws IOException {
 source.incrementAppendCount();
 source.incrementAppendTime(time);
 source.incrementAppendSize(size);

http://git-wip-us.apache.org/repos/asf/hbase/blob/25419d8b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java
index a6452e2..adcc6eb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java
@@ -98,8 +98,12 @@ public interface WALActionsListener {
* TODO: Combine this with above.
* @param entryLen approx length of cells in this append.
* @param elapsedTimeMillis elapsed time in milliseconds.
+   * @param logKey A WAL key
+   * @param logEdit A WAL edit containing list of cells.
+   * @throws IOException if any network or I/O error occurred
*/
-  void postAppend(
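
The widened hook hands listeners the key and edit of every appended entry, not just its size and latency. A rough sketch of a listener built on it (the class name and the byte accounting are made up; only the postAppend signature comes from the patch above):

import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;

/** Illustrative listener: tracks appended bytes; the WALKey could be used to aggregate per table. */
public class TableAppendSizeListener extends WALActionsListener.Base {

  private final AtomicLong bytesAppended = new AtomicLong();

  @Override
  public void postAppend(long entryLen, long elapsedTimeMillis, WALKey logKey, WALEdit logEdit)
      throws IOException {
    if (logEdit == null || logEdit.isEmpty()) {
      return; // nothing to account for
    }
    // logKey identifies the table/region the entry belongs to; a real listener could keep
    // one counter per table instead of this single global one.
    bytesAppended.addAndGet(entryLen);
  }

  public long getBytesAppended() {
    return bytesAppended.get();
  }
}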

[21/24] hbase git commit: HBASE-15623 Update refguide to change hadoop <= 2.3.x from NT to X for hbase-1.2.x

2016-04-11 Thread syuanjiang
HBASE-15623 Update refguide to change hadoop <= 2.3.x from NT to X for 
hbase-1.2.x


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a4dcf514
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a4dcf514
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a4dcf514

Branch: refs/heads/hbase-12439
Commit: a4dcf51415616772e462091ce93622f070ea8810
Parents: e0f31ba
Author: stack 
Authored: Sat Apr 9 15:17:57 2016 -0700
Committer: stack 
Committed: Sat Apr 9 15:17:57 2016 -0700

--
 src/main/asciidoc/_chapters/configuration.adoc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a4dcf514/src/main/asciidoc/_chapters/configuration.adoc
--
diff --git a/src/main/asciidoc/_chapters/configuration.adoc 
b/src/main/asciidoc/_chapters/configuration.adoc
index 49b0e7d..6aefd5a 100644
--- a/src/main/asciidoc/_chapters/configuration.adoc
+++ b/src/main/asciidoc/_chapters/configuration.adoc
@@ -222,8 +222,8 @@ Use the following legend to interpret this table:
 |Hadoop-0.23.x | S | X | X | X | X
 |Hadoop-2.0.x-alpha | NT | X | X | X | X
 |Hadoop-2.1.0-beta | NT | X | X | X | X
-|Hadoop-2.2.0 | NT | S | NT | NT | NT
-|Hadoop-2.3.x | NT | S | NT | NT | NT
+|Hadoop-2.2.0 | NT | S | NT | NT | X 
+|Hadoop-2.3.x | NT | S | NT | NT | X 
 |Hadoop-2.4.x | NT | S | S | S | S
 |Hadoop-2.5.x | NT | S | S | S | S
 |Hadoop-2.6.0 | X | X | X | X | X



[07/24] hbase git commit: HBASE-15485 Filter.reset() should not be called between batches (Phil Yang)

2016-04-11 Thread syuanjiang
HBASE-15485 Filter.reset() should not be called between batches (Phil Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a93a8878
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a93a8878
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a93a8878

Branch: refs/heads/hbase-12439
Commit: a93a8878fea49224310e9e51ac929c33ae6aa41f
Parents: 33396c3
Author: tedyu 
Authored: Mon Apr 4 12:52:24 2016 -0700
Committer: tedyu 
Committed: Mon Apr 4 12:52:24 2016 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |   4 +-
 .../hbase/regionserver/ScannerContext.java  |   9 +
 .../hbase/filter/TestFilterFromRegionSide.java  | 183 +++
 3 files changed, 194 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a93a8878/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 4da0f13..acaecf1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5652,7 +5652,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 // partial Result means that we should not reset the filters; filters
 // should only be reset in
 // between rows
-if (!scannerContext.partialResultFormed()) resetFilters();
+if (!scannerContext.midRowResultFormed()) resetFilters();
 
 if (isFilterDoneInternal()) {
   moreValues = false;
@@ -5727,7 +5727,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   nextKv = heap.peek();
   moreCellsInRow = moreCellsInRow(nextKv, currentRowCell);
   if (!moreCellsInRow) 
incrementCountOfRowsScannedMetric(scannerContext);
-  if (scannerContext.checkBatchLimit(limitScope)) {
+  if (moreCellsInRow && scannerContext.checkBatchLimit(limitScope)) {
 return 
scannerContext.setScannerState(NextState.BATCH_LIMIT_REACHED).hasMoreValues();
   } else if (scannerContext.checkSizeLimit(limitScope)) {
 ScannerContext.NextState state =

http://git-wip-us.apache.org/repos/asf/hbase/blob/a93a8878/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
index 6674443..de4647d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
@@ -230,6 +230,15 @@ public class ScannerContext {
   }
 
   /**
+   * @return true when a mid-row result is formed.
+   */
+  boolean midRowResultFormed() {
+return scannerState == NextState.SIZE_LIMIT_REACHED_MID_ROW
+|| scannerState == NextState.TIME_LIMIT_REACHED_MID_ROW
+|| scannerState == NextState.BATCH_LIMIT_REACHED;
+  }
+
+  /**
* @param checkerScope
* @return true if the batch limit can be enforced in the checker's scope
*/
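
Seen from the client side, this is what the change protects: when Scan.setBatch() is set, one wide row comes back as several Results, and a stateful per-row filter only behaves correctly if its state survives those mid-row chunks. A rough sketch (the table name and limits are made up; ColumnPaginationFilter is just one example of a filter whose per-row count reset() clears):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;

public class BatchedFilterScan {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
        Table table = conn.getTable(TableName.valueOf("t1"))) {       // hypothetical table
      Scan scan = new Scan();
      scan.setBatch(2);                                 // wide rows come back two cells at a time
      scan.setFilter(new ColumnPaginationFilter(5, 0)); // at most 5 columns per row
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result chunk : scanner) {
          // With the fix, the filter's column count is only reset between rows, so each row
          // yields at most 5 columns in total; resetting between these mid-row chunks would
          // instead let every chunk start a fresh count.
          System.out.println(chunk);
        }
      }
    }
  }
}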

http://git-wip-us.apache.org/repos/asf/hbase/blob/a93a8878/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
new file mode 100644
index 000..0a287ce
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the Lice

[22/24] hbase git commit: HBASE-15527 Refactor Compactor related classes

2016-04-11 Thread syuanjiang
HBASE-15527 Refactor Compactor related classes


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f7d44e92
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f7d44e92
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f7d44e92

Branch: refs/heads/hbase-12439
Commit: f7d44e929fd2a8dac5f15c50c2eea1d448e92eb7
Parents: a4dcf51
Author: zhangduo 
Authored: Sat Apr 9 16:18:08 2016 +0800
Committer: zhangduo 
Committed: Sun Apr 10 09:26:28 2016 +0800

--
 .../hbase/mob/DefaultMobStoreCompactor.java |  76 +---
 .../AbstractMultiOutputCompactor.java   | 131 +++--
 .../regionserver/compactions/Compactor.java | 189 +++
 .../compactions/DateTieredCompactor.java|  33 ++--
 .../compactions/DefaultCompactor.java   | 171 +
 .../compactions/StripeCompactor.java|  60 +++---
 6 files changed, 317 insertions(+), 343 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f7d44e92/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
index 33eb7b9..fe640c6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
@@ -42,11 +42,12 @@ import 
org.apache.hadoop.hbase.regionserver.MobCompactionStoreScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFile.Writer;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -58,6 +59,45 @@ public class DefaultMobStoreCompactor extends 
DefaultCompactor {
   private static final Log LOG = 
LogFactory.getLog(DefaultMobStoreCompactor.class);
   private long mobSizeThreshold;
   private HMobStore mobStore;
+
+  private final InternalScannerFactory scannerFactory = new 
InternalScannerFactory() {
+
+@Override
+public ScanType getScanType(CompactionRequest request) {
+  return request.isRetainDeleteMarkers() ? ScanType.COMPACT_RETAIN_DELETES
+  : ScanType.COMPACT_DROP_DELETES;
+}
+
+@Override
+public InternalScanner createScanner(List scanners,
+ScanType scanType, FileDetails fd, long smallestReadPoint) throws 
IOException {
+  Scan scan = new Scan();
+  scan.setMaxVersions(store.getFamily().getMaxVersions());
+  if (scanType == ScanType.COMPACT_DROP_DELETES) {
+// In major compaction, we need to write the delete markers to del 
files, so we have to
+// retain them in scanning.
+scanType = ScanType.COMPACT_RETAIN_DELETES;
+return new MobCompactionStoreScanner(store, store.getScanInfo(), scan, 
scanners,
+scanType, smallestReadPoint, fd.earliestPutTs, true);
+  } else {
+return new MobCompactionStoreScanner(store, store.getScanInfo(), scan, 
scanners,
+scanType, smallestReadPoint, fd.earliestPutTs, false);
+  }
+}
+  };
+
+  private final CellSinkFactory writerFactory = new 
CellSinkFactory() {
+
+@Override
+public Writer createWriter(InternalScanner scanner,
+org.apache.hadoop.hbase.regionserver.compactions.Compactor.FileDetails 
fd,
+boolean shouldDropBehind) throws IOException {
+  // make this writer with tags always because of possible new cells with 
tags.
+  return store.createWriterInTmp(fd.maxKeyCount, compactionCompression, 
true, true, true,
+shouldDropBehind);
+}
+  };
+
   public DefaultMobStoreCompactor(Configuration conf, Store store) {
 super(conf, store);
 // The mob cells reside in the mob-enabled column family which is held by 
HMobStore.
@@ -71,36 +111,10 @@ public class DefaultMobStoreCompactor extends 
DefaultCompactor {
 mobSizeThreshold = store.getFamily().getMobThreshold();
   }
 
-  /**
-   * Creates a writer for a new file in a temporary directory.
-   * @param fd The file details.
-   * @param shouldDropBehind Should the writer drop behind.
-   * @return Writer for a new StoreFile in the tmp dir.
-   * @t
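
Stripped of HBase internals, the refactor converges the compactors on one idea: the base class owns the single read-and-rewrite loop, and each variant only supplies a scanner factory and a cell-sink (writer) factory. The sketch below is purely illustrative of that shape; none of these types are HBase's actual Compactor, InternalScannerFactory or CellSinkFactory classes.

import java.io.IOException;
import java.util.List;

// Illustrative stand-ins for the two extension points the refactor introduces.
interface ScannerFactory<S> {
  S createScanner(List<String> inputFiles) throws IOException;
}

interface SinkFactory<W> {
  W createWriter(boolean shouldDropBehind) throws IOException;
}

final class MiniCompactor<S extends AutoCloseable, W> {
  private final ScannerFactory<S> scannerFactory;
  private final SinkFactory<W> sinkFactory;

  MiniCompactor(ScannerFactory<S> scannerFactory, SinkFactory<W> sinkFactory) {
    this.scannerFactory = scannerFactory;
    this.sinkFactory = sinkFactory;
  }

  // The shared compaction loop lives here; variants differ only in the factories they pass in.
  W compact(List<String> inputFiles) throws Exception {
    try (S scanner = scannerFactory.createScanner(inputFiles)) {
      W writer = sinkFactory.createWriter(true);
      // ... drain the scanner into the writer, respecting throughput limits, etc. ...
      return writer;
    }
  }
}

That appears to be why DefaultMobStoreCompactor above shrinks to two anonymous factories plus a constructor, while the compaction loop itself stays in the shared Compactor code.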

[17/24] hbase git commit: HBASE-15537 Make multi WAL work with WALs other than FSHLog

2016-04-11 Thread syuanjiang
HBASE-15537 Make multi WAL work with WALs other than FSHLog


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/394b89d1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/394b89d1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/394b89d1

Branch: refs/heads/hbase-12439
Commit: 394b89d153a9bef67a84633f4ff68aff26d53832
Parents: 2dcd08b
Author: zhangduo 
Authored: Wed Apr 6 17:04:28 2016 +0800
Committer: zhangduo 
Committed: Fri Apr 8 10:36:16 2016 +0800

--
 .../hbase/regionserver/wal/AbstractFSWAL.java   |   4 +
 .../hadoop/hbase/wal/AbstractFSWALProvider.java |   4 +-
 .../hbase/wal/RegionGroupingProvider.java   | 138 ---
 .../org/apache/hadoop/hbase/wal/WALFactory.java |  30 ++--
 ...ReplicationEndpointWithMultipleAsyncWAL.java |  36 +
 .../TestReplicationEndpointWithMultipleWAL.java |   2 +
 ...lMasterRSCompressedWithMultipleAsyncWAL.java |  37 +
 ...onKillMasterRSCompressedWithMultipleWAL.java |   2 +
 ...plicationSyncUpToolWithMultipleAsyncWAL.java |  37 +
 ...estReplicationSyncUpToolWithMultipleWAL.java |   2 +
 .../wal/TestBoundedRegionGroupingStrategy.java  | 131 ++
 11 files changed, 273 insertions(+), 150 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/394b89d1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
index b89488a..e4c4eb3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
@@ -678,6 +678,10 @@ public abstract class AbstractFSWAL implements WAL {
 // NewPath could be equal to oldPath if replaceWriter fails.
 newPath = replaceWriter(oldPath, newPath, nextWriter);
 tellListenersAboutPostLogRoll(oldPath, newPath);
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Create new " + getClass().getSimpleName() + " writer with 
pipeline: "
+  + Arrays.toString(getPipeline()));
+}
 // Can we delete any of the old log files?
 if (getNumRolledLogFiles() > 0) {
   cleanOldLogs();

http://git-wip-us.apache.org/repos/asf/hbase/blob/394b89d1/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
index 2f5c299..e495e99 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
@@ -82,7 +82,7 @@ public abstract class AbstractFSWALProvider> implemen
* @param factory factory that made us, identity used for FS layout. may not 
be null
* @param conf may not be null
* @param listeners may be null
-   * @param providerId differentiate between providers from one facotry, used 
for FS layout. may be
+   * @param providerId differentiate between providers from one factory, used 
for FS layout. may be
*  null
*/
   @Override
@@ -109,7 +109,7 @@ public abstract class AbstractFSWALProvider> implemen
   }
 
   @Override
-  public WAL getWAL(byte[] identifier, byte[] namespace) throws IOException {
+  public T getWAL(byte[] identifier, byte[] namespace) throws IOException {
 T walCopy = wal;
 if (walCopy == null) {
   // only lock when need to create wal, and need to lock since

http://git-wip-us.apache.org/repos/asf/hbase/blob/394b89d1/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
index 0aeaccf..b447e94 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
@@ -18,34 +18,31 @@
  */
 package org.apache.hadoop.hbase.wal;
 
-import static 
org.apache.hadoop.hbase.wal.DefaultWALProvider.META_WAL_PROVIDER_ID;
-import static 
org.apache.hadoop.hbase.wal.DefaultWALProvider.WAL_FILE_NAME_DELIMITER;
+import static 
org.apache.hadoop.hbase.wal.AbstractFSWALProvider.META_WAL_PROVIDER_ID

[23/24] hbase git commit: HBASE-15591 ServerCrashProcedure not yielding

2016-04-11 Thread syuanjiang
HBASE-15591 ServerCrashProcedure not yielding


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/80df1cb7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/80df1cb7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/80df1cb7

Branch: refs/heads/hbase-12439
Commit: 80df1cb7b6c3eaae20c3f1390e60f1d35be004b9
Parents: f7d44e9
Author: Jerry He 
Authored: Sun Apr 10 17:02:39 2016 -0700
Committer: Jerry He 
Committed: Sun Apr 10 17:02:39 2016 -0700

--
 .../master/procedure/ServerCrashProcedure.java | 17 ++---
 1 file changed, 14 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/80df1cb7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index 19e05fd..7de694c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -172,7 +172,7 @@ implements ServerProcedureInterface {
 
   @Override
   protected Flow executeFromState(MasterProcedureEnv env, ServerCrashState 
state)
-  throws ProcedureYieldException {
+  throws ProcedureYieldException {
 if (LOG.isTraceEnabled()) {
   LOG.trace(state);
 }
@@ -208,10 +208,17 @@ implements ServerProcedureInterface {
   case SERVER_CRASH_GET_REGIONS:
 // If hbase:meta is not assigned, yield.
 if (!isMetaAssignedQuickTest(env)) {
+  // isMetaAssignedQuickTest does not really wait. Let's delay a 
little before
+  // another round of execution.
+  long wait =
+  env.getMasterConfiguration().getLong(KEY_SHORT_WAIT_ON_META,
+DEFAULT_SHORT_WAIT_ON_META);
+  wait = wait / 10;
+  Thread.sleep(wait);
   throwProcedureYieldException("Waiting on hbase:meta assignment");
 }
 this.regionsOnCrashedServer =
-  
services.getAssignmentManager().getRegionStates().getServerRegions(this.serverName);
+
services.getAssignmentManager().getRegionStates().getServerRegions(this.serverName);
 // Where to go next? Depends on whether we should split logs at all or 
if we should do
 // distributed log splitting (DLS) vs distributed log replay (DLR).
 if (!this.shouldSplitWal) {
@@ -291,8 +298,12 @@ implements ServerProcedureInterface {
 return Flow.NO_MORE_STATE;
 
   default:
-  throw new UnsupportedOperationException("unhandled state=" + state);
+throw new UnsupportedOperationException("unhandled state=" + state);
   }
+} catch (ProcedureYieldException e) {
+  LOG.warn("Failed serverName=" + this.serverName + ", state=" + state + 
"; retry "
+  + e.getMessage());
+  throw e;
 } catch (IOException e) {
   LOG.warn("Failed serverName=" + this.serverName + ", state=" + state + 
"; retry", e);
 } catch (InterruptedException e) {
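
The fix boils down to "cheap check, short sleep, then yield", so the procedure stops spinning through the executor while hbase:meta is still unassigned. A self-contained, illustrative condensation of that pattern (the exception and supplier types stand in for ProcedureYieldException and isMetaAssignedQuickTest; the divide-by-ten mirrors the patch):

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

public final class YieldWithBackoff {

  /** Stand-in for ProcedureYieldException: tells the framework to reschedule this step. */
  static final class YieldException extends Exception {
    YieldException(String msg) { super(msg); }
  }

  static void checkOrYield(BooleanSupplier quickTest, long configuredWaitMs)
      throws InterruptedException, YieldException {
    if (!quickTest.getAsBoolean()) {
      // The quick test returns immediately; without this sleep the step is re-executed
      // right away and burns CPU re-checking the same condition.
      TimeUnit.MILLISECONDS.sleep(configuredWaitMs / 10);
      throw new YieldException("Waiting on hbase:meta assignment");
    }
  }

  public static void main(String[] args) throws Exception {
    try {
      checkOrYield(() -> false, 1000); // pretend meta is not assigned yet
    } catch (YieldException e) {
      System.out.println("yielded: " + e.getMessage());
    }
  }
}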



[19/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index d00e78c..92e8a0f 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -248,11 +248,11 @@
 
 
 ServerName
-Server.getServerName() 
+SplitLogTask.getServerName() 
 
 
 ServerName
-SplitLogTask.getServerName() 
+Server.getServerName() 
 
 
 static ServerName
@@ -698,16 +698,16 @@
 
 
 
-private void
-ConnectionImplementation.cacheLocation(TableName tableName,
+void
+MetaCache.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-void
-MetaCache.cacheLocation(TableName tableName,
+private void
+ConnectionImplementation.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.
@@ -736,20 +736,20 @@
 
 
 void
-ConnectionImplementation.clearCaches(ServerName serverName) 
+HConnection.clearCaches(ServerName sn)
+Deprecated. 
+internal method, do not use thru HConnection
+
+
 
 
 void
-ClusterConnection.clearCaches(ServerName sn)
-Clear any caches that pertain to server name 
sn.
-
+ConnectionImplementation.clearCaches(ServerName serverName) 
 
 
 void
-HConnection.clearCaches(ServerName sn)
-Deprecated. 
-internal method, do not use thru HConnection
-
+ClusterConnection.clearCaches(ServerName sn)
+Clear any caches that pertain to server name 
sn.
 
 
 
@@ -838,16 +838,6 @@
 
 
 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-ConnectionImplementation.getAdmin(ServerName serverName) 
-
-
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-ClusterConnection.getAdmin(ServerName serverName)
-Establishes a connection to the region server at the 
specified address.
-
-
-
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 HConnection.getAdmin(ServerName serverName)
 Deprecated. 
 internal method, do not use thru HConnection
@@ -856,11 +846,16 @@
 
 
 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-ConnectionImplementation.getAdmin(ServerName serverName,
-boolean master) 
+ConnectionImplementation.getAdmin(ServerName serverName) 
 
 
 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
+ClusterConnection.getAdmin(ServerName serverName)
+Establishes a connection to the region server at the 
specified address.
+
+
+
+org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 HConnection.getAdmin(ServerName serverName,
 boolean getMaster)
 Deprecated. 
@@ -868,6 +863,11 @@
 
 
 
+
+org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
+ConnectionImplementation.getAdmin(ServerName serverName,
+boolean master) 
+
 
 private http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
 AsyncProcess.AsyncRequestFutureImpl.getBackoff(ServerName server,
@@ -875,21 +875,21 @@
 
 
 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService.BlockingInterface
-ConnectionImplementation.getClient(ServerName sn) 
+HConnection.getClient(ServerName serverName)
+Deprecated. 
+internal method, do not use thru HConnection
+
+
 
 
 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService.BlockingInterface
-ClusterConnection.getClient(ServerName serverName)
-Establishes a connection to the region server at the 
specified address, and returns
- a region client protocol.
-
+ConnectionImplementation.getClient(ServerName sn) 
 
 
 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService.BlockingInterface
-HConnection.getClient(ServerName serverName)
-Deprecated. 
-internal method, do not use thru HConnection
-
+ClusterConnection.getClient(ServerName serverName)
+Establishes a connection to the region server at the 
specified address, and returns
+ a region client protocol.
 
 
 
@@ -949,30 +949,30 @@
 
 
 boolean
-ConnectionImplementation.isDeadServer(ServerName sn) 
+HConnection.isDeadServer(ServerName serverName)
+Deprecated. 
+internal method, do not use thru HConnection
+
+
 
 
 boolean
+ConnectionImplementation.isDeadServer(ServerName sn) 
+
+
+boolean
 ClusterConnection.isDeadServer(ServerName serverName)
 Deprecated. 
 internal method, do not use thru HConnection
 
 
 
-
+
 boolean
 ClusterStatusListener.isDeadServer(ServerName sn)
 Check if we know if a server i

[26/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index 99b8782..b09affb 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -191,170 +191,166 @@ service.
 
 
 
-static HColumnDescriptor
-HColumnDescriptor.convert(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema cfs) 
-
-
 HColumnDescriptor[]
 HTableDescriptor.getColumnFamilies()
 Returns an array all the HColumnDescriptor of 
the column families
  of the table.
 
 
-
+
 HColumnDescriptor
 HTableDescriptor.getFamily(byte[] column)
 Returns the HColumnDescriptor for a specific column family 
with name as
  specified by the parameter column.
 
 
-
+
 static HColumnDescriptor
 HColumnDescriptor.parseFrom(byte[] bytes) 
 
-
+
 HColumnDescriptor
 HTableDescriptor.removeFamily(byte[] column)
 Removes the HColumnDescriptor with name specified by the 
parameter column
  from the table descriptor
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setBlockCacheEnabled(boolean blockCacheEnabled) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setBlocksize(int s) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setBloomFilterType(BloomType bt) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheBloomsOnWrite(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheDataInL1(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheDataOnWrite(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheIndexesOnWrite(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCompactionCompressionType(Compression.Algorithm type)
 Compression types supported in hbase.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCompressionType(Compression.Algorithm type)
 Compression types supported in hbase.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCompressTags(boolean compressTags)
 Set whether the tags should be compressed along with 
DataBlockEncoding.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setConfiguration(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String value)
 Setter for storing a configuration setting in configuration
 map.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setDataBlockEncoding(DataBlockEncoding type)
 Set data block encoding algorithm used in block cache.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setDFSReplication(short replication)
 Set the replication factor to hfile(s) belonging to this 
family
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setEncryptionKey(byte[] keyBytes)
 Set the raw crypto key attribute for the family
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setEncryptionType(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String algorithm)
 Set the encryption algorithm for use with this family
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setEvictBlocksOnClose(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setInMemory(boolean inMemory) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setKeepDeletedCells(KeepDeletedCells keepDeletedCells) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMaxVersions(int maxVersions) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMinVersions(int minVersions) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMobEnabled(boolean isMobEnabled)
 Enables the mob for the family.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMobThreshold(long threshold)
 Sets the mob threshold of the family.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setPrefetchBlocksOnOpen(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setScope(int scope) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setTimeToLive(int timeToLive) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setTimeToLive(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String timeToLive) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setValue(byte[] key,
 byte[] value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setValue(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setVersions(int minVersions,
 

[06/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultEncodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultEncodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultEncodingContext.html
index 344c9be..974369a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultEncodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultEncodingContext.html
@@ -114,17 +114,17 @@
 http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
 
 
-abstract int
-BufferedDataBlockEncoder.internalEncode(Cell cell,
-HFileBlockDefaultEncodingContext encodingCtx,
-http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
-
-
 int
 FastDiffDeltaEncoder.internalEncode(Cell cell,
 HFileBlockDefaultEncodingContext encodingContext,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
 
+
+abstract int
+BufferedDataBlockEncoder.internalEncode(Cell cell,
+HFileBlockDefaultEncodingContext encodingCtx,
+http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
+
 
 int
 PrefixKeyDeltaEncoder.internalEncode(Cell cell,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
index c480add..820e8fe 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockEncodingContext.html
@@ -181,18 +181,18 @@
 
 
 HFileBlockEncodingContext
-BufferedDataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
-  byte[] header,
-  HFileContext meta) 
-
-
-HFileBlockEncodingContext
 DataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
   byte[] headerBytes,
   HFileContext meta)
 Creates a encoder specific encoding context
 
 
+
+HFileBlockEncodingContext
+BufferedDataBlockEncoder.newDataBlockEncodingContext(DataBlockEncoding encoding,
+  byte[] header,
+  HFileContext meta) 
+
 
 
 
@@ -204,25 +204,19 @@
 
 
 int
-BufferedDataBlockEncoder.encode(Cell cell,
-HFileBlockEncodingContext encodingCtx,
-http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
-
-
-int
 DataBlockEncoder.encode(Cell cell,
 HFileBlockEncodingContext encodingCtx,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out)
 Encodes a KeyValue.
 
 
-
-void
-BufferedDataBlockEncoder.endBlockEncoding(HFileBlockEncodingContext encodingCtx,
-http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out,
-
byte[] uncompressedBytesWithHeader) 
-
 
+int
+BufferedDataBlockEncoder.encode(Cell cell,
+HFileBlockEncodingContext encodingCtx,
+http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in 
java.io">DataOutputStream out) 
+
+
 void
 DataBlockEncoder.endBlockEncoding(HFileBlockEncodingContext encodingCtx,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true";
 title="class or interface in java.io">DataOutputStream out,
@@ -230,18 +224,24 @@
 Ends encoding for a block of KeyValues.
 
 
-
+
 void
-BufferedDataBlockEncoder.startBlockEncoding(HFileBlockEncodingConte

[34/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 8280441..7c49078 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2016 The Apache Software Foundation
 
-  File: 1728,
- Errors: 12478,
+  File: 1729,
+ Errors: 12449,
  Warnings: 0,
  Infos: 0
   
@@ -648,7 +648,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp.java";>org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.ZKConfig.java";>org/apache/hadoop/hbase/zookeeper/ZKConfig.java
 
 
   0
@@ -657,12 +657,12 @@ under the License.
   0
 
 
-  3
+  4
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.ZKConfig.java";>org/apache/hadoop/hbase/zookeeper/ZKConfig.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.java";>org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
 
 
   0
@@ -671,12 +671,12 @@ under the License.
   0
 
 
-  4
+  2
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.java";>org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp.java";>org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
 
 
   0
@@ -685,7 +685,7 @@ under the License.
   0
 
 
-  2
+  3
 
   
   
@@ -951,7 +951,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -1054,7 +1054,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.exceptions.DeserializationException.java";>org/apache/hadoop/hbase/exceptions/DeserializationException.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.ipc.CallRunner.java";>org/apache/hadoop/hbase/ipc/CallRunner.java
 
 
   0
@@ -1063,12 +1063,12 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.ipc.CallRunner.java";>org/apache/hadoop/hbase/ipc/CallRunner.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.exceptions.DeserializationException.java";>org/apache/hadoop/hbase/exceptions/DeserializationException.java
 
 
   0
@@ -1077,7 +1077,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -1306,7 +1306,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.ValueFilter.java";>org/apache/hadoop/hbase/filter/ValueFilter.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.http.ServerConfigurationKeys.java";>org/apache/hadoop/hbase/http/ServerConfigurationKeys.java
 
 
   0
@@ -1315,12 +1315,12 @@ under the License.
   0
 
 
-  6
+  1
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure2.store.wal.CorruptedWALProcedureStoreException.java";>org/apache/hadoop/hbase/procedure2/store/wal/CorruptedWALProc

[40/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index a5abf10..3f23fa9 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -826,22 +826,22 @@
 org.apache.hadoop.hbase.util.Order
 org.apache.hadoop.hbase.KeepDeletedCells
 org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
+org.apache.hadoop.hbase.filter.Filter.ReturnCode
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.FilterList.Operator
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.IsolationLevel
 org.apache.hadoop.hbase.client.Admin.CompactType
 org.apache.hadoop.hbase.client.Admin.MasterSwitchType
-org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.Consistency
 org.apache.hadoop.hbase.client.security.SecurityCapability
+org.apache.hadoop.hbase.regionserver.BloomType
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.quotas.ThrottleType
 
 
 



[11/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
index 9b9ec87..76e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
@@ -114,9 +114,7 @@
 
 
 void
-MasterObserver.postAbortProcedure(ObserverContext ctx)
-Called after a abortProcedure request has been 
processed.
-
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
@@ -124,18 +122,16 @@
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+MasterObserver.postAbortProcedure(ObserverContext ctx)
+Called after a abortProcedure request has been 
processed.
+
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
@@ -148,19 +144,21 @@
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
@@ -170,17 +168,17 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
-HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+HColumnDescriptor columnFamily) 
 
 
 void
@@ -190,20 +188,18 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+MasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
-HColumnDescriptor columnFamily) 
+HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- MasterObserver.postAddColumnFamilyHandler(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
@@ -216,14 +212,28 @@
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverConte

[37/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index 305abc2..a6ec065 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -55,586 +55,594 @@
 047import 
org.apache.hadoop.hbase.client.Connection;
 048import 
org.apache.hadoop.hbase.client.ConnectionFactory;
 049import 
org.apache.hadoop.hbase.client.RegionLocator;
-050import 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
-051import 
org.apache.hadoop.hbase.replication.ReplicationException;
-052import 
org.apache.hadoop.hbase.replication.ReplicationFactory;
-053import 
org.apache.hadoop.hbase.replication.ReplicationPeer;
-054import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-055import 
org.apache.hadoop.hbase.replication.ReplicationPeerZKImpl;
-056import 
org.apache.hadoop.hbase.replication.ReplicationPeers;
-057import 
org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
-058import 
org.apache.hadoop.hbase.util.Pair;
-059import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-060
-061/**
-062 * 

-063 * This class provides the administrative interface to HBase cluster -064 * replication. In order to use it, the cluster and the client using -065 * ReplicationAdmin must be configured with hbase.replication -066 * set to true. -067 *

-068 *

-069 * Adding a new peer results in creating new outbound connections from every -070 * region server to a subset of region servers on the slave cluster. Each -071 * new stream of replication will start replicating from the beginning of the -072 * current WAL, meaning that edits from that past will be replicated. -073 *

-074 *

-075 * Removing a peer is a destructive and irreversible operation that stops -076 * all the replication streams for the given cluster and deletes the metadata -077 * used to keep track of the replication state. -078 *

-079 *

-080 * To see which commands are available in the shell, type -081 * replication. -082 *

-083 */ -084@InterfaceAudience.Public -085@InterfaceStability.Evolving -086public class ReplicationAdmin implements Closeable { -087 private static final Log LOG = LogFactory.getLog(ReplicationAdmin.class); -088 -089 public static final String TNAME = "tableName"; -090 public static final String CFNAME = "columnFamilyName"; -091 -092 // only Global for now, can add other type -093 // such as, 1) no global replication, or 2) the table is replicated to this cluster, etc. -094 public static final String REPLICATIONTYPE = "replicationType"; -095 public static final String REPLICATIONGLOBAL = Integer -096 .toString(HConstants.REPLICATION_SCOPE_GLOBAL); -097 -098 private final Connection connection; -099 // TODO: replication should be managed by master. All the classes except ReplicationAdmin should -100 // be moved to hbase-server. Resolve it in HBASE-11392. -101 private final ReplicationQueuesClient replicationQueuesClient; -102 private final ReplicationPeers replicationPeers; -103 /** -104 * A watcher used by replicationPeers and replicationQueuesClient. Keep reference so can dispose -105 * on {@link #close()}. -106 */ -107 private final ZooKeeperWatcher zkw; -108 -109 /** -110 * Constructor that creates a connection to the local ZooKeeper ensemble. -111 * @param conf Configuration to use -112 * @throws IOException if an internal replication error occurs -113 * @throws RuntimeException if replication isn't enabled. -114 */ -115 public ReplicationAdmin(Configuration conf) throws IOException { -116if (!conf.getBoolean(HConstants.REPLICATION_ENABLE_KEY, -117 HConstants.REPLICATION_ENABLE_DEFAULT)) { -118 throw new RuntimeException("hbase.replication isn't true, please " + -119 "enable it in order to use replication"); -120} -121this.connection = ConnectionFactory.createConnection(conf); -122try { -123 zkw = createZooKeeperWatcher(); -124 try { -125this.replicationQueuesClient = -126 ReplicationFactory.getReplicationQueuesClient(zkw, conf, this.connection); -127 this.replicationQueuesClient.init(); -128this.replicationPeers = ReplicationFactory.getReplicationPeers(zkw, conf, -129 this.replicationQueuesClient, this.connection); -130this.replicationPeers.init(); -131 } catch (Exception exception) { -132if (zkw != null) { -133 zkw.close(); -134} -135throw exception; -136 } -137} catch (Exception exceptio

[10/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
index d0bdf04..a01cc24 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
@@ -185,9 +185,7 @@
 
 
 void
-MasterObserver.postAbortProcedure(ObserverContext ctx)
-Called after a abortProcedure request has been 
processed.
-
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
@@ -195,18 +193,16 @@
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+MasterObserver.postAbortProcedure(ObserverContext ctx)
+Called after a abortProcedure request has been 
processed.
+
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
@@ -219,19 +215,21 @@
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
@@ -241,17 +239,17 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
-HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+HColumnDescriptor columnFamily) 
 
 
 void
@@ -261,20 +259,18 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+MasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
-HColumnDescriptor columnFamily) 
+HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- MasterObserver.postAddColumnFamilyHandler(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
@@ -287,18 +283,20 @@
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+MasterObserver.postAddColum

[49/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 4ab2c1d..b35bded 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20160408145708)
-  /CreationDate (D:20160408145708)
+  /ModDate (D:20160411150029)
+  /CreationDate (D:20160411150029)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index a5b7b0d..f517443 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -1591,10 +1591,6 @@
 
 Convert a protobuf ClusterStatus to a ClusterStatus
 
-convert(HBaseProtos.ColumnFamilySchema)
 - Static method in class org.apache.hadoop.hbase.HColumnDescriptor
- 
-convert()
 - Method in class org.apache.hadoop.hbase.HColumnDescriptor
- 
 convert(HRegionInfo)
 - Static method in class org.apache.hadoop.hbase.HRegionInfo
 
 Convert a HRegionInfo to a RegionInfo
@@ -1603,10 +1599,6 @@
 
 Convert a RegionInfo to a HRegionInfo
 
-convert()
 - Method in class org.apache.hadoop.hbase.HTableDescriptor
- 
-convert(HBaseProtos.TableSchema)
 - Static method in class org.apache.hadoop.hbase.HTableDescriptor
- 
 convert(ProcedureProtos.Procedure)
 - Static method in class org.apache.hadoop.hbase.ProcedureInfo
 
 Helper to convert the protobuf object.
@@ -9933,6 +9925,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 Retrieve the next byte from this range without incrementing 
position.
 
+peerAdded(String)
 - Method in class org.apache.hadoop.hbase.client.replication.ReplicationAdmin
+ 
 PHOENIX
 - Static variable in class org.apache.hadoop.hbase.HBaseInterfaceAudience
  
 PleaseHoldException 
- Exception in org.apache.hadoop.hbase
@@ -14249,6 +14243,8 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 Sets the given timestamp to the cell iff current timestamp 
is
  HConstants.LATEST_TIMESTAMP.
 
+updatePeerConfig(String,
 ReplicationPeerConfig) - Method in class 
org.apache.hadoop.hbase.client.replication.ReplicationAdmin
+ 
 URL_SAFE 
- Static variable in class org.apache.hadoop.hbase.util.Base64
 
 Encode using Base64-like encoding that is URL and Filename 
safe as



[03/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.html
index 708ebd1..55c4be5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.html
@@ -97,7 +97,7 @@
 
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public class TableSnapshotInputFormatImpl
+public class TableSnapshotInputFormatImpl
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 Hadoop MR API-agnostic implementation for mapreduce over 
table snapshots.
 
@@ -273,7 +273,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 LOG
-public static final org.apache.commons.logging.Log LOG
+public static final org.apache.commons.logging.Log LOG
 
 
 
@@ -282,7 +282,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 SNAPSHOT_NAME_KEY
-private static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SNAPSHOT_NAME_KEY
+private static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SNAPSHOT_NAME_KEY
 See Also:Constant
 Field Values
 
 
@@ -292,7 +292,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 RESTORE_DIR_KEY
-protected static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String RESTORE_DIR_KEY
+protected static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String RESTORE_DIR_KEY
 See Also:Constant
 Field Values
 
 
@@ -302,7 +302,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 LOCALITY_CUTOFF_MULTIPLIER
-private static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LOCALITY_CUTOFF_MULTIPLIER
+private static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String LOCALITY_CUTOFF_MULTIPLIER
 See getBestLocations(Configuration,
 HDFSBlocksDistribution)
 See Also:Constant
 Field Values
 
@@ -313,7 +313,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 DEFAULT_LOCALITY_CUTOFF_MULTIPLIER
-private static final float DEFAULT_LOCALITY_CUTOFF_MULTIPLIER
+private static final float DEFAULT_LOCALITY_CUTOFF_MULTIPLIER
 See Also:Constant
 Field Values
 
 
@@ -331,7 +331,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 TableSnapshotInputFormatImpl
-public TableSnapshotInputFormatImpl()
+public TableSnapshotInputFormatImpl()
 
 
 
@@ -348,7 +348,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getSplits
-public static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getSplits(org.apache.hadoop.conf.Configuration conf)
+public static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getSplits(org.apache.hadoop.conf.Configuration conf)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Throws:
 http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
@@ -360,7 +360,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getRegionInfosFromManifest
-public static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getRegionInfosFromManifest(SnapshotManifest manifest)
+public static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getRegionInfosFromManifest(SnapshotManifest manifest)
 
 
 
@@ -369,7 +369,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getSnapshotManifest
-public static SnapshotManifest getSnapshotManifest(org.apache.hadoop.conf.Configuration conf,
+public static SnapshotManifest getSnapshotManifest(o

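For context on the class touched above: TableSnapshotInputFormatImpl backs the public TableSnapshotInputFormat, which lets a MapReduce job read restored snapshot files directly instead of scanning through region servers. A minimal sketch of wiring such a job follows; the snapshot name "snap1", the restore path, and the row-counting mapper are illustrative placeholders, not anything taken from the diff.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.hbase.mapreduce.TableMapper;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

    public class SnapshotScanJob {
      /** Counts rows read from the restored snapshot; purely illustrative. */
      static class RowCountMapper extends TableMapper<ImmutableBytesWritable, ImmutableBytesWritable> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) {
          context.getCounter("snapshot", "rows").increment(1);
        }
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "scan-snapshot");
        job.setJarByClass(SnapshotScanJob.class);
        // The job reads restored snapshot files from HDFS rather than live region servers.
        TableMapReduceUtil.initTableSnapshotMapperJob(
            "snap1",                         // snapshot name (placeholder)
            new Scan(),                      // scan applied over the snapshot
            RowCountMapper.class,
            ImmutableBytesWritable.class,    // map output key class
            ImmutableBytesWritable.class,    // map output value class
            job,
            true,                            // add HBase dependency jars to the job
            new Path("/tmp/snap1-restore")); // scratch dir the snapshot is restored into (placeholder)
        job.setNumReduceTasks(0);
        job.setOutputFormatClass(NullOutputFormat.class);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }
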
[30/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
index cf891d8..29e8c1e 100644
--- a/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -105,7 +105,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HTableDescriptor
+public class HTableDescriptor
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable
 HTableDescriptor contains the details about an HBase table  
such as the descriptors of
@@ -535,14 +535,6 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema
-convert() 
-
-
-static HTableDescriptor
-convert(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema ts) 
-
-
 boolean
 equals(http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object obj)
 Compare the contents of the descriptor with another one 
passed as a parameter.
@@ -986,7 +978,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -995,7 +987,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 name
-private TableName name
+private TableName name
 
 
 
@@ -1004,7 +996,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 values
-private final http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map values
+private final http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map values
 A map which holds the metadata information of the table. 
This metadata
  includes values like IS_ROOT, IS_META, DEFERRED_LOG_FLUSH, SPLIT_POLICY,
  MAX_FILE_SIZE, READONLY, MEMSTORE_FLUSHSIZE etc...
@@ -1016,7 +1008,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 configuration
-private final http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String> configuration
+private final http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String> configuration
 A map which holds the configuration specific to the table.
  The keys of the map have the same names as config keys and override the 
defaults with
  table-specific settings. Example usage may be for compactions, etc.
@@ -1028,7 +1020,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 SPLIT_POLICY
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SPLIT_POLICY
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SPLIT_POLICY
 See Also:Constant
 Field Values
 
 
@@ -1038,7 +1030,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 MAX_FILESIZE
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MAX_FILESIZE
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MAX_FILESIZE
 INTERNAL Used by HBase Shell interface to access 
this metadata
  attribute which denotes the maximum size of the store file after which
  a region split occurs
@@ -1052,7 +1044,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 MAX_FILESIZE_KEY
-private static final Bytes MAX_FILESIZE_KEY
+private static final Bytes MAX_FILESIZE_KEY
 
 
 
@@ -10

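The hunk above drops the protobuf convert() entries from the HTableDescriptor method summary and documents its two metadata maps ("values" and the per-table "configuration"). A small sketch of how those maps are populated through the public setters might look like the following; the table name, family, and keys are placeholders, and toByteArray() is assumed here as the serialization counterpart of the parseFrom(byte[]) listed in the method summary.

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;

    public class TableSchemaExample {
      public static void main(String[] args) throws Exception {
        // Table, family, and keys below are placeholders.
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo_table"));
        htd.addFamily(new HColumnDescriptor("cf"));

        // Stored in the "values" metadata map described above.
        htd.setValue("OWNER_APP", "demo");
        // Stored in the per-table "configuration" map described above.
        htd.setConfiguration("hbase.hstore.compaction.min", "5");

        // Round trip through the byte[] form instead of the removed protobuf convert().
        byte[] serialized = htd.toByteArray();
        HTableDescriptor copy = HTableDescriptor.parseFrom(serialized);
        System.out.println(copy.getTableName());
      }
    }
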
[01/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site db94a6390 -> 169802072


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.html
index caaf029..7d6c68e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.html
@@ -110,7 +110,7 @@
 
 
 @InterfaceAudience.Private
-public class CloneSnapshotProcedure
+public class CloneSnapshotProcedure
 extends StateMachineProcedure
 implements TableProcedureInterface
 
@@ -413,7 +413,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -422,7 +422,7 @@ implements 
 
 aborted
-private final http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean aborted
+private final http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean aborted
 
 
 
@@ -431,7 +431,7 @@ implements 
 
 user
-private org.apache.hadoop.security.UserGroupInformation user
+private org.apache.hadoop.security.UserGroupInformation user
 
 
 
@@ -440,7 +440,7 @@ implements 
 
 hTableDescriptor
-private HTableDescriptor hTableDescriptor
+private HTableDescriptor hTableDescriptor
 
 
 
@@ -449,7 +449,7 @@ implements 
 
 snapshot
-private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
 snapshot
+private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
 snapshot
 
 
 
@@ -458,7 +458,7 @@ implements 
 
 newRegions
-private http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List newRegions
+private http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List newRegions
 
 
 
@@ -467,7 +467,7 @@ implements 
 
 parentsToChildrenPairMap
-private http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,PairString,http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>> parentsToChildrenPairMap
+private http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,PairString,http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>> parentsToChildrenPairMap
 
 
 
@@ -476,7 +476,7 @@ implements 
 
 monitorStatus
-private MonitoredTask monitorStatus
+private MonitoredTask monitorStatus
 
 
 
@@ -485,7 +485,7 @@ implements 
 
 traceEnabled
-private http://docs.oracle.com/javase/7/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean traceEnabled
+private http://docs.oracle.com/javase/7/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean traceEnabled
 
 
 
@@ -502,7 +502,7 @@ implements 
 
 CloneSnapshotProcedure
-public CloneSnapshotProcedure()
+public CloneSnapshotProcedure()
 Constructor (for failover)
 
 
@@ -512,7 +512,7 @@ implements 
 
 CloneSnapshotProcedure
-public CloneSnapshotProcedure(MasterProcedureEnv env,
+public CloneSnapshotProcedure(MasterProcedureEnv env,
   HTableDescriptor hTableDescriptor,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
@@ -536,7 +536,7 @@ implements 
 
 getMonitorStatus
-private MonitoredTask getMonitorStatus()
+private MonitoredTask getMonitorStatus()
 Set up monitor st

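CloneSnapshotProcedure is the master-side state machine; from a client, the same operation is normally reached through Admin.cloneSnapshot. A minimal sketch, assuming an existing snapshot named "snap1" and a new table name "demo_clone" (both placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class CloneSnapshotExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
          // Clones the snapshot into a brand-new table without copying data files.
          admin.cloneSnapshot("snap1", TableName.valueOf("demo_clone"));
        }
      }
    }
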
[04/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

2016-04-11 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/LruCachedBlock.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/LruCachedBlock.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/LruCachedBlock.html
index 65ec455..3c39891 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/LruCachedBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/LruCachedBlock.html
@@ -148,14 +148,14 @@
 
 
 void
-LruBlockCache.BlockBucket.add(LruCachedBlock block) 
-
-
-void
 LruCachedBlockQueue.add(LruCachedBlock cb)
 Attempt to add the specified cached block to this 
queue.
 
 
+
+void
+LruBlockCache.BlockBucket.add(LruCachedBlock block) 
+
 
 int
 LruCachedBlock.compareTo(LruCachedBlock that) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index aa27490..f244233 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -271,12 +271,12 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
 org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
-org.apache.hadoop.hbase.io.hfile.BlockPriority
 org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
-org.apache.hadoop.hbase.io.hfile.BlockType
 org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
+org.apache.hadoop.hbase.io.hfile.BlockPriority
+org.apache.hadoop.hbase.io.hfile.BlockType
+org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
index 53c18ef..53dacd5 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
@@ -101,13 +101,13 @@
 AdaptiveLifoCoDelCallQueue.queue 
 
 
-private http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListBlockingQueue>
-RWQueueRpcExecutor.queues 
-
-
 protected http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListBlockingQueue>
 BalancedQueueRpcExecutor.queues 
 
+
+private http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListBlockingQueue>
+RWQueueRpcExecutor.queues 
+
 
 
 
@@ -155,19 +155,19 @@
 
 
 
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListBlockingQueue>
+BalancedQueueRpcExecutor.getQueues() 
+
+
 protected http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListBlockingQueue>
 RWQueueRpcExecutor.getQueues() 
 
-
+
 protected abstract http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List[05/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
index e9b19ce..28e2743 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
@@ -237,21 +237,21 @@
 
 
 
+private CacheConfig
+HFileReaderImpl.cacheConf
+Block cache configuration.
+
+
+
 protected CacheConfig
 HFile.WriterFactory.cacheConf 
 
-
+
 private CacheConfig
 HFileBlockIndex.BlockIndexWriter.cacheConf
 CacheConfig, or null if cache-on-write is disabled
 
 
-
-private CacheConfig
-HFileReaderImpl.cacheConf
-Block cache configuration.
-
-
 
 
 
@@ -405,13 +405,6 @@
 
 
 
-static CachedMobFile
-CachedMobFile.create(org.apache.hadoop.fs.FileSystem fs,
-org.apache.hadoop.fs.Path path,
-org.apache.hadoop.conf.Configuration conf,
-CacheConfig cacheConf) 
-
-
 static MobFile
 MobFile.create(org.apache.hadoop.fs.FileSystem fs,
 org.apache.hadoop.fs.Path path,
@@ -420,6 +413,13 @@
 Creates an instance of the MobFile.
 
 
+
+static CachedMobFile
+CachedMobFile.create(org.apache.hadoop.fs.FileSystem fs,
+org.apache.hadoop.fs.Path path,
+org.apache.hadoop.conf.Configuration conf,
+CacheConfig cacheConf) 
+
 
 static StoreFile.Writer
 MobUtils.createDelFileWriter(org.apache.hadoop.conf.Configuration conf,
@@ -605,14 +605,14 @@
 
 
 CacheConfig
-HRegionServer.getCacheConfig() 
-
-
-CacheConfig
 Store.getCacheConfig()
 Used for tests.
 
 
+
+CacheConfig
+HRegionServer.getCacheConfig() 
+
 
 CacheConfig
 HStore.getCacheConfig() 
@@ -727,11 +727,11 @@
 
 
 private CacheConfig
-BlockCacheViewTmplImpl.cacheConfig 
+BlockCacheTmplImpl.cacheConfig 
 
 
 private CacheConfig
-BlockCacheTmplImpl.cacheConfig 
+BlockCacheViewTmplImpl.cacheConfig 
 
 
 private CacheConfig

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
index dc61c71..4143193 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
@@ -154,14 +154,14 @@
 
 
 CacheStats
-LruBlockCache.getStats()
-Get counter statistics for this cache.
+BlockCache.getStats()
+Get the statistics for this block cache.
 
 
 
 CacheStats
-BlockCache.getStats()
-Get the statistics for this block cache.
+LruBlockCache.getStats()
+Get counter statistics for this cache.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
index 62df01b..2d95e9f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
@@ -168,17 +168,19 @@
 
 
 Cacheable
-InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
 boolean updateCacheMetrics) 
 
 
 Cacheable
-CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+BlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics) 
+boolean updateCacheMetrics)
+Fetch block from cache.
+
 
 
 Cacheable
@@ -191,12 +193,10 @@
 
 
 Cacheable
-BlockCache.getBlock(BlockCacheKey cacheKey,
+InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics)
-Fetch block from cache.
-
+boolean updateCacheMetrics) 
 
 
 Cacheable
@@ -250,16 +250,16 @@
 
 
 void
-LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
+BlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf)
-Cache the block with the specified name and buffer.
+Add block to cache (defaults to not in-memory).
 
 
 
 void
-BlockCache.cacheBlock(BlockCacheKey cacheKey,
+LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf)
-Add block to cache (defaults to not in-memory).
+Cache the block with the specified name and buffer.
 
 
 
@@ -269,17 

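The class-use tables above list the two halves of the BlockCache contract: getBlock(key, caching, repeat, updateCacheMetrics) to fetch and cacheBlock(key, buf) to insert. A small read-through helper written only against those signatures is sketched below; BlockCache is a private-audience API, so this is illustration only, and the flag values are assumptions about a typical read path rather than anything stated in the diff.

    import org.apache.hadoop.hbase.io.hfile.BlockCache;
    import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
    import org.apache.hadoop.hbase.io.hfile.Cacheable;

    /** Illustration only; BlockCache and friends are InterfaceAudience.Private. */
    public final class BlockCacheSketch {
      private BlockCacheSketch() {}

      /** Return the cached block for the key, otherwise cache and return the freshly read one. */
      public static Cacheable readThrough(BlockCache cache, BlockCacheKey key, Cacheable freshlyRead) {
        // caching=true, repeat=false, updateCacheMetrics=true: assumed typical read-path flags.
        Cacheable hit = cache.getBlock(key, true, false, true);
        if (hit != null) {
          return hit;
        }
        // cacheBlock(key, buf) defaults to a not-in-memory insertion, per the javadoc above.
        cache.cacheBlock(key, freshlyRead);
        return freshlyRead;
      }
    }
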
[22/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 8656b1e..edba4df 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -263,151 +263,147 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-static HTableDescriptor
-HTableDescriptor.convert(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema ts) 
-
-
 HTableDescriptor
 TableDescriptors.get(TableName tableName) 
 
-
+
 HTableDescriptor
 TableDescriptor.getHTableDescriptor()
 Associated HTableDescriptor
 
 
-
+
 static HTableDescriptor
 TableDescriptor.metaTableDescriptor(org.apache.hadoop.conf.Configuration conf) 
 
-
+
 HTableDescriptor
 HTableDescriptor.modifyFamily(HColumnDescriptor family)
 Modifies the existing column family.
 
 
-
+
 static HTableDescriptor
 HTableDescriptor.parseFrom(byte[] bytes) 
 
-
+
 HTableDescriptor
 TableDescriptors.remove(TableName tablename) 
 
-
+
 HTableDescriptor
 HTableDescriptor.setCompactionEnabled(boolean isEnable)
 Setting the table compaction enable flag.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setConfiguration(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String value)
 Setter for storing a configuration setting in configuration
 map.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setDurability(Durability durability)
 Sets the Durability 
setting for the table.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setFlushPolicyClassName(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String clazz)
 This sets the class associated with the flush policy which 
determines determines the stores
  need to be flushed when flushing a region.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setMaxFileSize(long maxFileSize)
 Sets the maximum size up to which a region can grow, after 
which a region
  split is triggered.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setMemStoreFlushSize(long memstoreFlushSize)
 Represents the maximum size of the memstore after which the 
contents of the
  memstore are flushed to the filesystem.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setName(byte[] name)
 Deprecated. 
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setName(TableName name)
 Deprecated. 
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setNormalizationEnabled(boolean isEnable)
 Setting the table normalization enable flag.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setOwner(User owner)
 Deprecated. 
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setOwnerString(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String ownerString)
 Deprecated. 
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setReadOnly(boolean readOnly)
 Setting the table as read only sets all the columns in the 
table as read
  only.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setRegionMemstoreReplication(boolean memstoreReplication)
 Enable or Disable the memstore replication from the primary 
region to the replicas.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setRegionReplication(int regionReplication)
 Sets the number of replicas per region.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setRegionSplitPolicyClassName(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String clazz)
 This sets the class associated with the region split policy 
which
  determines when a region split should occur.
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setValue(byte[] key,
 byte[] value)
 Setter for storing metadata as a (key, value) pair in values
 map
 
 
-
+
 HTableDescriptor
 HTableDescriptor.setValue(Bytes key,
 Bytes value) 
 
-
+
 private HTableDescriptor
 HTableDescriptor.setValue(Bytes key,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String value) 
 
-
+
 HTableDescriptor
 HTableDescriptor.setValue(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String value)
@@ -595,69 +591,69 @@ Input/OutputFormats, a table indexing MapReduce job, and 
u

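Each setter in the table above returns the HTableDescriptor itself, so table attributes are usually configured as a chain. A brief sketch, with every name and number chosen purely for illustration:

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Durability;

    public class DescriptorSettersExample {
      public static void main(String[] args) {
        // All names and numbers are illustrative, not tuning advice.
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo_table"))
            .setMaxFileSize(10L * 1024 * 1024 * 1024)  // region splits once it grows past ~10 GB
            .setMemStoreFlushSize(128L * 1024 * 1024)  // flush memstores at 128 MB
            .setDurability(Durability.ASYNC_WAL)
            .setRegionReplication(2)
            .setCompactionEnabled(true);
        htd.addFamily(new HColumnDescriptor("cf"));
        System.out.println(htd);
      }
    }
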
[29/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index 41fb732..a0339eb 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -249,7 +249,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -266,7 +266,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
index c31803c..521f7b6 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
@@ -331,7 +331,7 @@ the order they are declared.
 
 
 values
-public static KeyValue.Type[] values()
+public static KeyValue.Type[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -348,7 +348,7 @@ for (KeyValue.Type c : KeyValue.Type.values())
 
 
 valueOf
-public static KeyValue.Type valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static KeyValue.Type valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
index bbb672e..186adce 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
@@ -294,11 +294,11 @@
 
 
 private Abortable
-SimpleRpcScheduler.abortable 
+RpcExecutor.abortable 
 
 
 private Abortable
-RpcExecutor.abortable 
+SimpleRpcScheduler.abortable 
 
 
 
@@ -600,13 +600,13 @@
 
 
 
-private Abortable
-ReplicationPeersZKImpl.abortable 
-
-
 protected Abortable
 ReplicationStateZKBase.abortable 
 
+
+private Abortable
+ReplicationPeersZKImpl.abortable 
+
 
 
 
@@ -821,15 +821,15 @@
 
 
 
-private Abortable
-ZKClusterId.abortable 
-
-
 protected Abortable
 ZooKeeperNodeTracker.abortable
 Used to abort if a fatal error occurs
 
 
+
+private Abortable
+ZKClusterId.abortable 
+
 
 protected Abortable
 ZooKeeperWatcher.abortable 



[12/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
index 7d01f4b..1fb2511 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
@@ -135,12 +135,12 @@
 
 
 PrefixTreeEncoder
-EncoderPoolImpl.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in java.io">OutputStream outputStream,
+EncoderPool.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in java.io">OutputStream outputStream,
 boolean includeMvccVersion) 
 
 
 PrefixTreeEncoder
-EncoderPool.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in java.io">OutputStream outputStream,
+EncoderPoolImpl.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in java.io">OutputStream outputStream,
 boolean includeMvccVersion) 
 
 
@@ -167,11 +167,11 @@
 
 
 void
-EncoderPoolImpl.checkIn(PrefixTreeEncoder encoder) 
+EncoderPool.checkIn(PrefixTreeEncoder encoder) 
 
 
 void
-EncoderPool.checkIn(PrefixTreeEncoder encoder) 
+EncoderPoolImpl.checkIn(PrefixTreeEncoder encoder) 
 
 
 static void
@@ -201,13 +201,13 @@
 
 
 protected PrefixTreeEncoder
-RowNodeWriter.prefixTreeEncoder
+RowSectionWriter.prefixTreeEncoder
 fields
 
 
 
 protected PrefixTreeEncoder
-RowSectionWriter.prefixTreeEncoder
+RowNodeWriter.prefixTreeEncoder
 fields
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
index a741ea2..ec91e8f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
@@ -104,11 +104,11 @@
 
 
 protected ColumnNodeType
-ColumnReader.nodeType 
+ColumnNodeReader.nodeType 
 
 
 protected ColumnNodeType
-ColumnNodeReader.nodeType 
+ColumnReader.nodeType 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/conf/class-use/ConfigurationManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/conf/class-use/ConfigurationManager.html 
b/devapidocs/org/apache/hadoop/hbase/conf/class-use/ConfigurationManager.html
index df58878..6a1393c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/conf/class-use/ConfigurationManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/conf/class-use/ConfigurationManager.html
@@ -174,13 +174,13 @@
 
 
 void
-HStore.deregisterChildren(ConfigurationManager manager)
+HRegion.deregisterChildren(ConfigurationManager manager)
 Needs to be called to deregister the children from the 
manager.
 
 
 
 void
-HRegion.deregisterChildren(ConfigurationManager manager)
+HStore.deregisterChildren(ConfigurationManager manager)
 Needs to be called to deregister the children from the 
manager.
 
 
@@ -192,13 +192,13 @@
 
 
 void
-HStore.registerChildren(ConfigurationManager manager)
+HRegion.registerChildren(ConfigurationManager manager)
 Needs to be called to register the children to the 
manager.
 
 
 
 void
-HRegion.registerChildren(ConfigurationManager manager)
+HStore.registerChildren(ConfigurationManager manager)
 Needs to be called to register the children to the 
manager.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
index 1713185..2ede7be 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
@@ -223,7 +223,7 @@ the order they are declared.
 
 
 values
-public static RegionObserver.MutationType[] values()
+pub

[20/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
index f69011b..d2bc3fa 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
@@ -325,23 +325,15 @@
 
 
 private Server
-ActiveMasterManager.master 
-
-
-private Server
 ServerManager.master 
 
-
+
 (package private) Server
 MasterFileSystem.master 
 
-
-protected Server
-BulkAssigner.server 
-
 
 private Server
-CatalogJanitor.server 
+ActiveMasterManager.master 
 
 
 private Server
@@ -349,8 +341,16 @@
 
 
 private Server
+CatalogJanitor.server 
+
+
+private Server
 SplitLogManager.server 
 
+
+protected Server
+BulkAssigner.server 
+
 
 
 
@@ -518,19 +518,19 @@
 
 
 private Server
-SplitTransactionImpl.server 
+HeapMemoryManager.server 
 
 
 private Server
-SplitTransactionImpl.DaughterOpener.server 
+SplitTransactionImpl.server 
 
 
 private Server
-LogRoller.server 
+SplitTransactionImpl.DaughterOpener.server 
 
 
 private Server
-HeapMemoryManager.server 
+LogRoller.server 
 
 
 
@@ -543,21 +543,21 @@
 
 
 Server
-RegionMergeTransaction.getServer()
-Get the Server running the transaction or rollback
-
+RegionMergeTransactionImpl.getServer() 
 
 
 Server
-RegionMergeTransactionImpl.getServer() 
+SplitTransactionImpl.getServer() 
 
 
 Server
-SplitTransactionImpl.getServer() 
+SplitTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 Server
-SplitTransaction.getServer()
+RegionMergeTransaction.getServer()
 Get the Server running the transaction or rollback
 
 
@@ -595,24 +595,15 @@
 
 
 Region
-RegionMergeTransaction.execute(Server server,
-  RegionServerServices services)
-Deprecated. 
-use #execute(Server, RegionServerServices, 
User)
-
-
-
-
-Region
 RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services) 
 
-
+
 PairOfSameType
 SplitTransactionImpl.execute(Server server,
   RegionServerServices services) 
 
-
+
 PairOfSameType
 SplitTransaction.execute(Server server,
   RegionServerServices services)
@@ -621,27 +612,28 @@
 
 
 
-
+
 Region
-RegionMergeTransaction.execute(Server server,
-  RegionServerServices services,
-  User user)
-Run the transaction.
+RegionMergeTransaction.execute(Server server,
+  RegionServerServices services)
+Deprecated. 
+use #execute(Server, RegionServerServices, 
User)
+
 
 
-
+
 Region
 RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services,
   User user) 
 
-
+
 PairOfSameType
 SplitTransactionImpl.execute(Server server,
   RegionServerServices services,
   User user) 
 
-
+
 PairOfSameType
 SplitTransaction.execute(Server server,
   RegionServerServices services,
@@ -649,6 +641,14 @@
 Run the transaction.
 
 
+
+Region
+RegionMergeTransaction.execute(Server server,
+  RegionServerServices services,
+  User user)
+Run the transaction.
+
+
 
 void
 ReplicationService.initialize(Server rs,
@@ -684,24 +684,15 @@
 
 
 boolean
-RegionMergeTransaction.rollback(Server server,
-RegionServerServices services)
-Deprecated. 
-use #rollback(Server, RegionServerServices, 
User)
-
-
-
-
-boolean
 RegionMergeTransactionImpl.rollback(Server server,
 RegionServerServices services) 
 
-
+
 boolean
 SplitTransactionImpl.rollback(Server server,
 RegionServerServices services) 
 
-
+
 boolean
 SplitTransaction.rollback(Server server,
 RegionServerServices services)
@@ -710,27 +701,28 @@
 
 
 
-
+
 boolean
-RegionMergeTransaction.rollback(Server server,
-RegionServerServices services,
-User user)
-Roll back a failed transaction
+RegionMergeTransaction.rollback(Server server,
+RegionServerServices services)
+Deprecated. 
+use #rollback(Server, RegionServerServices, 
User)
+
 
 
-
+
 boolean
 RegionMergeTransactionImpl.rollback(Server server,
 RegionServerServices services,
 User user) 
 
-
+
 boolean
 SplitTransactionImpl.rollback(Server server,
 RegionServerServices services,
 User user) 
 
-
+
 boolean
 SplitTransaction.rollback(Server server,
 RegionServerServices services,
@@ -738,6 +730,14 @@
 Roll back a failed transaction
 
 
+
+boolean
+RegionMergeTransaction.rollback(Server server,
+RegionServerServices services,
+User user)
+Roll back a failed transaction
+
+
 
 void
 RegionMergeTransactionImpl.stepsAfterPONR(Server server,



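The table above shows the transaction-style contract of RegionMergeTransaction: execute(server, services, user) runs the merge and rollback(server, services, user) undoes a failed one. A sketch of that execute-then-rollback pattern, written only against the signatures listed here, follows; these are private-audience region-server interfaces, so this is illustrative rather than a supported client API.

    import java.io.IOException;

    import org.apache.hadoop.hbase.Server;
    import org.apache.hadoop.hbase.regionserver.Region;
    import org.apache.hadoop.hbase.regionserver.RegionMergeTransaction;
    import org.apache.hadoop.hbase.regionserver.RegionServerServices;
    import org.apache.hadoop.hbase.security.User;

    /** Illustration only; these region-server interfaces are InterfaceAudience.Private. */
    public final class MergeTransactionSketch {
      private MergeTransactionSketch() {}

      /** Run the merge transaction, rolling back if it fails, as the javadoc above describes. */
      public static Region mergeOrRollback(RegionMergeTransaction mt, Server server,
          RegionServerServices services, User user) throws IOException {
        try {
          return mt.execute(server, services, user);
        } catch (IOException | RuntimeException e) {
          if (!mt.rollback(server, services, user)) {
            throw new IOException("Merge failed and rollback did not complete", e);
          }
          throw new IOException("Merge failed; changes rolled back", e);
        }
      }
    }
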
[41/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 2f3ad05..58b0e89 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -116,100 +116,100 @@
 
 
 
-T
-DataType.decode(PositionedByteRange src)
-Read an instance of T from the buffer 
src.
-
+byte[]
+OrderedBlob.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
-OrderedNumeric.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
+OrderedInt32.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-RawLong.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+RawString.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+OrderedInt64.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
-Struct.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+RawDouble.decode(PositionedByteRange src) 
 
 
-T
-FixedLengthWrapper.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
+RawInteger.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-RawByte.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+OrderedFloat64.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
+RawFloat.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-OrderedInt8.decode(PositionedByteRange src) 
+T
+FixedLengthWrapper.decode(PositionedByteRange src) 
 
 
-byte[]
-RawBytes.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRange src) 
 
 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
+OrderedNumeric.decode(PositionedByteRange src) 
+
+
 T
 TerminatedWrapper.decode(PositionedByteRange src) 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
+OrderedFloat32.decode(PositionedByteRange src) 
+
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRange src) 
+byte[]
+RawBytes.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-OrderedInt64.decode(PositionedByteRange src) 
+RawLong.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-OrderedInt16.decode(PositionedByteRange src) 
+RawShort.decode(PositionedByteRange src) 
 
 
 byte[]
 OrderedBlobVar.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRange src) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="clas

[07/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.
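The table above is the DataType decode surface of the org.apache.hadoop.hbase.types package. A generic round-trip helper using encodedLength, encode, and decode is sketched below; SimplePositionedMutableByteRange as the PositionedByteRange implementation and the OrderedInt32.ASC instance are assumptions about the surrounding util/types classes, not something shown in this diff.

    import org.apache.hadoop.hbase.types.DataType;
    import org.apache.hadoop.hbase.types.OrderedInt32;
    import org.apache.hadoop.hbase.util.PositionedByteRange;
    import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

    public final class DataTypeRoundTrip {
      private DataTypeRoundTrip() {}

      /** Encode a value into a fresh buffer, rewind, and read it back with decode(src). */
      public static <T> T roundTrip(DataType<T> type, T value) {
        PositionedByteRange buf = new SimplePositionedMutableByteRange(type.encodedLength(value));
        type.encode(buf, value);
        buf.setPosition(0); // decode(src) reads from the current position
        return type.decode(buf);
      }

      public static void main(String[] args) {
        // OrderedInt32.ASC keeps the encoded bytes in the same sort order as the ints.
        System.out.println(roundTrip(OrderedInt32.ASC, 42));
      }
    }
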

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 7d65a2f..50966d8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -158,11 +158,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
+TableRecordReader.createKey() 
 
 
 ImmutableBytesWritable
@@ -179,11 +179,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+  
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -193,9 +191,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
+  
org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -214,37 +214,37 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-GroupingTableMap.map(ImmutableBytesWritable key,
+IdentityTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to reduce
 
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-  Result values,
+GroupingTableMap.map(ImmutableBytesWritable key,
+  Result value,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter) 
+  org.apache.hadoop.mapred.Reporter reporter)
+Extract the grouping columns from value to construct a new 
key.
+
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+  Result values,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+  org.apache.hadoop.mapred.Reporter reporter) 
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -277,28 +277,28 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-GroupingTableMap.map(ImmutableBytesWritable key,
+IdentityTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to reduce
 
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-  Result values,
+GroupingTableMap.map(ImmutableBytesWritable key,
+  Result value,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter) 
+  org.apache.hadoop.mapred.Reporter reporter)
+Extract the grouping columns from value to construct a new 
key.
+
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+  Result values,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apach

[43/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 87c63f2..24328e2 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -106,78 +106,78 @@
 MultipleColumnPrefixFilter.filterColumn(Cell cell) 
 
 
-Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored) 
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cell v)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c) 
+ColumnPrefixFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+WhileMatchFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v) 
+FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell v) 
+InclusiveStopFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c) 
+MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v) 
+FilterList.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c) 
+ValueFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
+DependentColumnFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
-Deprecated. 
- 
+FuzzyRowFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored) 
+SkipFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v) 
+ColumnRangeFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v) 
+TimestampsFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v) 
+SingleColumnValueFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
@@ -185,29 +185,29 @@
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c) 
+RandomRowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell v) 
+QualifierFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+MultiRowRangeFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell v) 
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
+Deprecated. 
+ 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 static Filter.ReturnCode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 02c305c..af620c0 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -148,19 +148,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Get
+Get.setFilter(Filter filter) 
+
+
 Scan
 Scan.setFilter(Filter filter) 
 
-
+
 Query
 Query.setFilter(Filter filter)
 Apply the specified server-side filter when performing the 
Query.
 
 
-
-Get
-Get.setFilter(Filter filter) 
-
 
 
 
@@ -382,83 +382,83 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-DependentColumnFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 

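The class-use tables above end at the public attachment points Get.setFilter, Scan.setFilter, and Query.setFilter. A short client-side sketch combining two of the listed filters on a Scan follows; the connection settings, the table name "demo_table", and the "user_" row prefix are placeholders.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FilteredScanExample {
      public static void main(String[] args) throws Exception {
        // Keep rows whose key starts with "user_" and strip the values, keys only.
        FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL,
            new PrefixFilter(Bytes.toBytes("user_")),
            new KeyOnlyFilter());
        Scan scan = new Scan();
        scan.setFilter(filters); // Scan.setFilter(Filter), as listed above

        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("demo_table"));
             ResultScanner scanner = table.getScanner(scan)) {
          for (Result r : scanner) {
            System.out.println(Bytes.toString(r.getRow()));
          }
        }
      }
    }
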
[02/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
index 2c47429..260ef3d 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
@@ -242,19 +242,19 @@
 
 
 private MasterServices
-MasterCoprocessorHost.masterServices 
+ClusterSchemaServiceImpl.masterServices 
 
 
 private MasterServices
-MasterCoprocessorHost.MasterEnvironment.masterServices 
+TableNamespaceManager.masterServices 
 
 
 private MasterServices
-TableNamespaceManager.masterServices 
+MasterCoprocessorHost.masterServices 
 
 
 private MasterServices
-ClusterSchemaServiceImpl.masterServices 
+MasterCoprocessorHost.MasterEnvironment.masterServices 
 
 
 private MasterServices
@@ -266,11 +266,11 @@
 
 
 private MasterServices
-ServerManager.services 
+CatalogJanitor.services 
 
 
 private MasterServices
-CatalogJanitor.services 
+ServerManager.services 
 
 
 private MasterServices
@@ -423,16 +423,16 @@
 StochasticLoadBalancer.LocalityBasedCandidateGenerator.masterServices 
 
 
-private MasterServices
-StochasticLoadBalancer.LocalityCostFunction.services 
+protected MasterServices
+BaseLoadBalancer.services 
 
 
 private MasterServices
-RegionLocationFinder.services 
+StochasticLoadBalancer.LocalityCostFunction.services 
 
 
-protected MasterServices
-BaseLoadBalancer.services 
+private MasterServices
+RegionLocationFinder.services 
 
 
 
@@ -445,11 +445,11 @@
 
 
 void
-StochasticLoadBalancer.setMasterServices(MasterServices masterServices) 
+BaseLoadBalancer.setMasterServices(MasterServices masterServices) 
 
 
 void
-BaseLoadBalancer.setMasterServices(MasterServices masterServices) 
+StochasticLoadBalancer.setMasterServices(MasterServices masterServices) 
 
 
 (package private) void
@@ -493,13 +493,13 @@
 
 
 
-protected MasterServices
-TableEventHandler.masterServices 
-
-
 private MasterServices
 DispatchMergingRegionHandler.masterServices 
 
+
+protected MasterServices
+TableEventHandler.masterServices 
+
 
 private MasterServices
 EnableTableHandler.services 
@@ -562,13 +562,13 @@
 
 
 void
-RegionNormalizer.setMasterServices(MasterServices masterServices)
+SimpleRegionNormalizer.setMasterServices(MasterServices masterServices)
 Set the master service.
 
 
 
 void
-SimpleRegionNormalizer.setMasterServices(MasterServices masterServices)
+RegionNormalizer.setMasterServices(MasterServices masterServices)
 Set the master service.
 
 
@@ -629,13 +629,13 @@
 
 
 
-protected MasterServices
-TakeSnapshotHandler.master 
-
-
 private MasterServices
 SnapshotManager.master 
 
+
+protected MasterServices
+TakeSnapshotHandler.master 
+
 
 private MasterServices
 MasterSnapshotVerifier.services 
@@ -827,23 +827,23 @@
 
 
 private MasterServices
-RSGroupInfoManagerImpl.master 
+RSGroupAdminServer.master 
 
 
 private MasterServices
-RSGroupAdminServer.master 
+RSGroupAdminEndpoint.master 
 
 
 private MasterServices
-RSGroupAdminEndpoint.master 
+RSGroupInfoManagerImpl.master 
 
 
 private MasterServices
-RSGroupInfoManagerImpl.RSGroupStartupWorker.masterServices 
+RSGroupBasedLoadBalancer.masterServices 
 
 
 private MasterServices
-RSGroupBasedLoadBalancer.masterServices 
+RSGroupInfoManagerImpl.RSGroupStartupWorker.masterServices 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/master/class-use/MetricsMasterWrapper.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/MetricsMasterWrapper.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/MetricsMasterWrapper.html
index f156778..a531176 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/MetricsMasterWrapper.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/MetricsMasterWrapper.html
@@ -127,13 +127,13 @@
 
 
 
-MetricsMasterSource
-MetricsMasterSourceFactory.create(MetricsMasterWrapper masterWrapper) 
-
-
 MetricsMasterProcSource
 MetricsMasterProcSourceFactory.create(MetricsMasterWrapper masterWrapper) 
 
+
+MetricsMasterSource
+MetricsMasterSourceFactory.create(MetricsMasterWrapper masterWrapper) 
+
 
 MetricsMasterProcSource
 MetricsMasterProcSourceFactoryImpl.create(MetricsMasterWrapper masterWrapper) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/master/class-use/RackManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/RackManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/RackManager.html
index c9222b0..88910df 100644
--- a/devapidocs/

[25/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
index 3df7b03..d037968 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
@@ -262,11 +262,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HDFSBlocksDistribution
-StoreFile.getHDFSBlockDistribution() 
+StoreFileInfo.getHDFSBlockDistribution() 
 
 
 HDFSBlocksDistribution
-StoreFileInfo.getHDFSBlockDistribution() 
+StoreFile.getHDFSBlockDistribution() 
 
 
 HDFSBlocksDistribution



[24/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index 9b26d8c..248a0f3 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -863,19 +863,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-RegionServerCallable.getHRegionInfo() 
+ScannerCallable.getHRegionInfo() 
 
 
 HRegionInfo
-ScannerCallable.getHRegionInfo() 
+RegionServerCallable.getHRegionInfo() 
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo() 
+MultiServerCallable.getHRegionInfo() 
 
 
 HRegionInfo
-MultiServerCallable.getHRegionInfo() 
+ScannerCallableWithReplicas.getHRegionInfo() 
 
 
 private HRegionInfo
@@ -1113,10 +1113,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-MasterObserver.postAssign(ObserverContext ctx,
-HRegionInfo regionInfo)
-Called after the region assignment has been requested.
-
+BaseMasterObserver.postAssign(ObserverContext ctx,
+HRegionInfo regionInfo) 
 
 
 void
@@ -1125,16 +1123,16 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterObserver.postAssign(ObserverContext ctx,
-HRegionInfo regionInfo) 
+MasterObserver.postAssign(ObserverContext ctx,
+HRegionInfo regionInfo)
+Called after the region assignment has been requested.
+
 
 
 void
-MasterObserver.postCreateTable(ObserverContext ctx,
+BaseMasterObserver.postCreateTable(ObserverContext ctx,
   HTableDescriptor desc,
-  HRegionInfo[] regions)
-Called after the createTable operation has been 
requested.
-
+  HRegionInfo[] regions) 
 
 
 void
@@ -1144,17 +1142,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterObserver.postCreateTable(ObserverContext ctx,
+MasterObserver.postCreateTable(ObserverContext ctx,
   HTableDescriptor desc,
-  HRegionInfo[] regions) 
+  HRegionInfo[] regions)
+Called after the createTable operation has been 
requested.
+
 
 
 void
-MasterObserver.postCreateTableHandler(ObserverContext ctx,
+BaseMasterObserver.postCreateTableHandler(ObserverContext ctx,
 HTableDescriptor desc,
-HRegionInfo[] regions)
-Called after the createTable operation has been 
requested.
-
+HRegionInfo[] regions) 
 
 
 void
@@ -1164,17 +1162,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterObserver.postCreateTableHandler(ObserverContext ctx,
+MasterObserver.postCreateTableHandler(ObserverContext ctx,
 HTableDescriptor desc,
-HRegionInfo[] regions) 
+HRegionInfo[] regions)
+Called after the createTable operation has been 
requested.
+
 
 
 void
-MasterObserver.postDispatchMerge(ObserverContext c,
+BaseMasterObserver.postDispatchMerge(ObserverContext ctx,
   HRegionInfo regionA,
-  HRegionInfo regionB)
-called after dispatching the region merge request.
-
+  HRegionInfo regionB) 
 
 
 void
@@ -1184,18 +1182,18 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterObserver.postDispatchMerge(ObserverContext ctx,
+MasterObserver.postDispatchMerge(ObserverContext c,
   HRegionInfo regionA,
-  HRegionInfo regionB) 
+  HRegionInfo regionB)
+called after dispatching the region merge request.
+
 
 
 void
-MasterObserver.postMove(ObserverContext ctx,
+BaseMasterObserver.postMove(ObserverContext ctx,
 HRegionInfo region,
 ServerName srcServer,
-ServerN

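(Context, not part of the diff: the postAssign/postCreateTable rows above are the BaseMasterObserver no-op defaults trading places with the MasterObserver interface docs. A minimal coprocessor sketch that overrides one of those hooks; the class name and log text are illustrative only.)

    import java.io.IOException;

    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
    import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;

    /** Relies on BaseMasterObserver's no-op defaults for every other hook. */
    public class TableCreationLogger extends BaseMasterObserver {
      @Override
      public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
          HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
        // Called after the createTable operation has been requested.
        System.out.println("Table created: " + desc.getTableName()
            + " with " + regions.length + " region(s)");
      }
    }

Such an observer would usually be registered through the master coprocessor configuration (for example hbase.coprocessor.master.classes); that wiring is an assumption here, not part of this diff.
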
[18/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 8422fa3..06289a9 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -759,51 +759,51 @@ service.
 
 
 private TableName
-HRegionLocator.tableName 
+BufferedMutatorParams.tableName 
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName 
+private TableName
+TableState.tableName 
 
 
-private TableName
-BufferedMutatorImpl.tableName 
+protected TableName
+RegionServerCallable.tableName 
 
 
 private TableName
-TableState.tableName 
+ClientScanner.tableName 
 
 
 private TableName
-BufferedMutatorParams.tableName 
+HTable.tableName 
 
 
 protected TableName
-RegionAdminServiceCallable.tableName 
+RpcRetryingCallerWithReadReplicas.tableName 
 
 
 private TableName
-AsyncProcess.AsyncRequestFutureImpl.tableName 
+HRegionLocator.tableName 
 
 
-protected TableName
-RegionServerCallable.tableName 
+private TableName
+HBaseAdmin.TableFuture.tableName 
 
 
 private TableName
-HTable.tableName 
+AsyncProcess.AsyncRequestFutureImpl.tableName 
 
 
 private TableName
-ClientScanner.tableName 
+ScannerCallableWithReplicas.tableName 
 
 
 private TableName
-ScannerCallableWithReplicas.tableName 
+BufferedMutatorImpl.tableName 
 
 
-private TableName
-HBaseAdmin.TableFuture.tableName 
+protected TableName
+RegionAdminServiceCallable.tableName 
 
 
 
@@ -837,14 +837,6 @@ service.
 
 
 TableName
-HRegionLocator.getName() 
-
-
-TableName
-BufferedMutatorImpl.getName() 
-
-
-TableName
 BufferedMutator.getName()
 Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
@@ -861,10 +853,18 @@ service.
 
 
 TableName
+HRegionLocator.getName() 
+
+
+TableName
 RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
+
+TableName
+BufferedMutatorImpl.getName() 
+
 
 TableName
 HTableWrapper.getName() 
@@ -875,13 +875,13 @@ service.
 
 
 TableName
-TableState.getTableName()
-Table name for state
-
+BufferedMutatorParams.getTableName() 
 
 
 TableName
-BufferedMutatorParams.getTableName() 
+TableState.getTableName()
+Table name for state
+
 
 
 TableName
@@ -897,16 +897,16 @@ service.
 
 
 TableName[]
-ConnectionImplementation.listTableNames()
-Deprecated. 
-Use Admin.listTableNames()
 instead
-
+Admin.listTableNames()
+List all of the names of userspace tables.
 
 
 
 TableName[]
-Admin.listTableNames()
-List all of the names of userspace tables.
+HConnection.listTableNames()
+Deprecated. 
+Use Admin.listTables()
 instead.
+
 
 
 
@@ -915,9 +915,9 @@ service.
 
 
 TableName[]
-HConnection.listTableNames()
+ConnectionImplementation.listTableNames()
 Deprecated. 
-Use Admin.listTables()
 instead.
+Use Admin.listTableNames()
 instead
 
 
 
@@ -1019,30 +1019,30 @@ service.
   HColumnDescriptor columnFamily) 
 
 
-private void
-ConnectionImplementation.cacheLocation(TableName tableName,
-  RegionLocations location)
+void
+MetaCache.cacheLocation(TableName tableName,
+  RegionLocations locations)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-void
-MetaCache.cacheLocation(TableName tableName,
-  RegionLocations locations)
+private void
+ConnectionImplementation.cacheLocation(TableName tableName,
+  RegionLocations location)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-private void
-ConnectionImplementation.cacheLocation(TableName tableName,
+void
+MetaCache.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-void
-MetaCache.cacheLocation(TableName tableName,
+private void
+ConnectionImplementation.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.
@@ -1077,21 +1077,21 @@ service.
 
 
 void
-ConnectionImplementation.clearRegionCache(TableName tableName) 
+HConnection.clearRegionCache(TableName tableName)
+Deprecated. 
+internal method, do not use through HConnection
+
+
 
 
 void
-ClusterConnection.clearRegionCache(TableName tableName)
-Allows flushing the region cache of all locations that 
pertain to
- tableName
-
+ConnectionImplementation.clearRegionCache(TableName tableName) 
 
 
 void
-HConnection.clearRegionCache(TableName tableName)
-Deprecated. 
-internal method, do not use through HConnection
-
+ClusterConnection.clearRegionCache(TableName tableName)
+Allows flushing the region cache of all 

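(Context, not part of the diff: many of the reshuffled tableName fields above sit on the client write path. A minimal sketch of the public entry points involved; the table, family, and buffer size are made up.)

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.BufferedMutator;
    import org.apache.hadoop.hbase.client.BufferedMutatorParams;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BufferedWriteExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        TableName table = TableName.valueOf("example_table");
        BufferedMutatorParams params = new BufferedMutatorParams(table)
            .writeBufferSize(4 * 1024 * 1024); // buffer ~4 MB of mutations client-side
        try (Connection connection = ConnectionFactory.createConnection(conf);
             BufferedMutator mutator = connection.getBufferedMutator(params)) {
          Put put = new Put(Bytes.toBytes("row-1"));
          put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));
          mutator.mutate(put); // queued and flushed asynchronously
          // getName() returns the fully qualified TableName, per the table above
          System.out.println("Writing to " + mutator.getName());
        } // close() flushes any pending mutations
      }
    }
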
[21/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 1689c47..38c22d9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
@@ -166,13 +166,13 @@
 
 
 
-protected InterProcessLock.MetadataHandler
-ZKInterProcessLockBase.handler 
-
-
 private InterProcessLock.MetadataHandler
 ZKInterProcessReadWriteLock.handler 
 
+
+protected InterProcessLock.MetadataHandler
+ZKInterProcessLockBase.handler 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
index e9a6b35..4f4468b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
@@ -159,13 +159,13 @@ the order they are declared.
 
 
 private KeepDeletedCells
-ScanQueryMatcher.keepDeletedCells
-whether to return deleted rows
-
+ScanInfo.keepDeletedCells 
 
 
 private KeepDeletedCells
-ScanInfo.keepDeletedCells 
+ScanQueryMatcher.keepDeletedCells
+whether to return deleted rows
+
 
 
 

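(Context, not part of the diff: KeepDeletedCells is configured per column family. A minimal sketch of the descriptor calls involved; the family name and retention values are illustrative.)

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.KeepDeletedCells;

    public class KeepDeletedCellsExample {
      public static void main(String[] args) {
        HColumnDescriptor family = new HColumnDescriptor("cf");
        family.setKeepDeletedCells(KeepDeletedCells.TRUE); // retain delete markers and deleted cells
        family.setMaxVersions(5);                          // keep up to five versions per cell
        family.setTimeToLive(7 * 24 * 60 * 60);            // expire data after one week (seconds)
        System.out.println(family);
      }
    }
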
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index fd8c1b1..1f1face 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
@@ -208,22 +208,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static KeyValue
-KeyValueUtil.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
+KeyValue.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
 
 
 static KeyValue
-KeyValue.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
+KeyValueUtil.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
 
 
 static KeyValue
-KeyValueUtil.create(int length,
+KeyValue.create(int length,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in)
 Create a KeyValue reading length from 
in
 
 
 
 static KeyValue
-KeyValue.create(int length,
+KeyValueUtil.create(int length,
 http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in)
 Create a KeyValue reading length from 
in
 
@@ -339,31 +339,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(byte[] b) 
+KeyValue.createKeyValueFromKey(byte[] b) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(byte[] b) 
+KeyValueUtil.createKeyValueFromKey(byte[] b) 
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(byte[] b,
+KeyValue.createKeyValueFromKey(byte[] b,
   int o,
   int l) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(byte[] b,
+KeyValueUtil.createKeyValueFromKey(byte[] b,
   int o,
   int l) 
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
+KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
+KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java

[42/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index a924e87..94c39f9 100644
--- a/apidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -112,9 +112,9 @@
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.quotas.QuotaScope
+org.apache.hadoop.hbase.quotas.ThrottleType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html
 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html
index 70d6187..0587d3a 100644
--- 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html
+++ 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationException.html
@@ -162,18 +162,22 @@
 
 
 void
+ReplicationAdmin.peerAdded(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id) 
+
+
+void
 ReplicationAdmin.removePeer(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id)
 Removes a peer cluster and stops the replication to 
it.
 
 
-
+
 void
 ReplicationAdmin.removePeerTableCFs(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
 http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapCollectionString>> tableCfs)
 Remove some table-cfs from config of the specified 
peer
 
 
-
+
 void
 ReplicationAdmin.removePeerTableCFs(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String tableCf)
@@ -183,13 +187,18 @@
 
 
 
-
+
 void
 ReplicationAdmin.setPeerTableCFs(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
   http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapCollectionString>> tableCfs)
 Set the replicable table-cf config of the specified 
peer
 
 
+
+void
+ReplicationAdmin.updatePeerConfig(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
+ReplicationPeerConfig peerConfig) 
+
 
 
 

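(Context, not part of the diff: the new peerAdded/updatePeerConfig rows above reflect additions to ReplicationAdmin. A rough usage sketch; the peer id and cluster key are assumptions.)

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
    import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

    public class UpdatePeerExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        ReplicationAdmin replicationAdmin = new ReplicationAdmin(conf);
        try {
          ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
          // Point the existing peer "1" at a (hypothetical) new ZooKeeper quorum.
          peerConfig.setClusterKey("zk1,zk2,zk3:2181:/hbase");
          replicationAdmin.updatePeerConfig("1", peerConfig);
        } finally {
          replicationAdmin.close();
        }
      }
    }
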
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
index c0a5b61..560cf2c 100644
--- 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
+++ 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
@@ -144,6 +144,11 @@
 
 
 
+
+void
+ReplicationAdmin.updatePeerConfig(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-ext

[51/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/16980207
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/16980207
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/16980207

Branch: refs/heads/asf-site
Commit: 1698020721a869e9050ecfba1e06d2ae0b65cdb3
Parents: db94a63
Author: jenkins 
Authored: Mon Apr 11 15:10:13 2016 +
Committer: Misty Stanley-Jones 
Committed: Mon Apr 11 09:30:32 2016 -0700

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|  8110 
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/index-all.html  |12 +-
 .../apache/hadoop/hbase/HColumnDescriptor.html  |   283 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |   237 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   228 +-
 .../hbase/class-use/HColumnDescriptor.html  |68 +-
 .../hbase/class-use/HTableDescriptor.html   |54 +-
 .../hadoop/hbase/class-use/ServerName.html  | 4 +-
 .../hadoop/hbase/class-use/TableName.html   |54 +-
 .../hbase/client/Admin.MasterSwitchType.html| 4 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../client/UnmodifyableHTableDescriptor.html| 2 +-
 .../hbase/client/class-use/Consistency.html |10 +-
 .../hbase/client/class-use/Durability.html  | 8 +-
 .../hbase/client/class-use/IsolationLevel.html  |10 +-
 .../hadoop/hbase/client/class-use/Result.html   |48 +-
 .../hadoop/hbase/client/class-use/Row.html  | 6 +-
 .../hadoop/hbase/client/package-tree.html   | 2 +-
 .../client/replication/ReplicationAdmin.html|94 +-
 .../filter/class-use/Filter.ReturnCode.html |60 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |50 +-
 .../hadoop/hbase/filter/package-tree.html   | 6 +-
 .../io/class-use/ImmutableBytesWritable.html|68 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|12 +-
 .../hbase/io/crypto/class-use/Cipher.html   |16 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 2 +-
 .../class-use/ReplicationException.html |15 +-
 .../class-use/ReplicationPeerConfig.html| 5 +
 .../hadoop/hbase/util/class-use/Bytes.html  |16 +-
 .../hadoop/hbase/util/class-use/Order.html  |42 +-
 .../hadoop/hbase/util/class-use/Pair.html   | 4 +-
 .../util/class-use/PositionedByteRange.html |   380 +-
 apidocs/overview-tree.html  |12 +-
 .../apache/hadoop/hbase/HColumnDescriptor.html  |  2403 ++-
 .../apache/hadoop/hbase/HTableDescriptor.html   |  3099 ++-
 .../client/replication/ReplicationAdmin.html|  1160 +-
 book.html   | 6 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 17954 -
 checkstyle.rss  |   590 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 4 +-
 devapidocs/allclasses-noframe.html  | 4 +-
 devapidocs/constant-values.html | 2 +-
 devapidocs/deprecated-list.html |   270 +-
 devapidocs/index-all.html   |   106 +-
 .../apache/hadoop/hbase/HColumnDescriptor.html  |   337 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |   305 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../org/apache/hadoop/hbase/KeyValue.Type.html  | 4 +-
 .../hadoop/hbase/class-use/Abortable.html   |20 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   838 +-
 .../hadoop/hbase/class-use/CellComparator.html  |98 +-
 .../hadoop/hbase/class-use/CellScanner.html |36 +-
 .../hadoop/hbase/class-use/ClusterStatus.html   |20 +-
 .../hadoop/hbase/class-use/Coprocessor.html |12 +-
 .../hbase/class-use/CoprocessorEnvironment.html |58 +-
 .../hbase/class-use/HBaseIOException.html   | 8 +-
 .../hbase/class-use/HColumnDescriptor.html  |   322 +-
 .../hbase/class-use/HDFSBlocksDistribution.html | 4 +-
 .../hadoop/hbase/class-use/HRegionInfo.html |   308 +-
 .../hadoop/hbase/class-use/HRegionLocation.html |   168 +-
 .../hbase/class-u

[09/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
index 4254a1e..2024382 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
@@ -152,15 +152,15 @@
 
 
 private RegionCoprocessorEnvironment
-MultiRowMutationEndpoint.env 
+BaseRowProcessorEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-AggregateImplementation.env 
+MultiRowMutationEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-BaseRowProcessorEndpoint.env 
+AggregateImplementation.env 
 
 
 
@@ -1422,11 +1422,11 @@
 
 
 private RegionCoprocessorEnvironment
-RowCountEndpoint.env 
+BulkDeleteEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-BulkDeleteEndpoint.env 
+RowCountEndpoint.env 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
index b19bf70..32ac40d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
@@ -135,14 +135,14 @@
 
 
 ForeignException
-ForeignExceptionDispatcher.getException() 
-
-
-ForeignException
 ForeignExceptionSnare.getException()
 Get the value of the captured exception.
 
 
+
+ForeignException
+ForeignExceptionDispatcher.getException() 
+
 
 
 
@@ -179,14 +179,14 @@
 
 
 void
-ForeignExceptionDispatcher.rethrowException() 
-
-
-void
 ForeignExceptionSnare.rethrowException()
 Rethrow an exception currently held by the ForeignExceptionSnare.
 
 
+
+void
+ForeignExceptionDispatcher.rethrowException() 
+
 
 
 
@@ -297,6 +297,14 @@
 
 
 void
+ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
+ForeignException cause)
+Notify the members that the coordinator has aborted the 
procedure and that it should release
+ barrier resources.
+
+
+
+void
 ZKProcedureCoordinatorRpcs.sendAbortToMembers(Procedure proc,
 ForeignException ee)
 This is the abort message being sent by the coordinator to 
member
@@ -305,14 +313,6 @@
  coordinator.
 
 
-
-void
-ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
-ForeignException cause)
-Notify the members that the coordinator has aborted the 
procedure and that it should release
- barrier resources.
-
-
 
 void
 ZKProcedureMemberRpcs.sendMemberAborted(Subprocedure sub,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignExceptionDispatcher.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignExceptionDispatcher.html
 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignExceptionDispatcher.html
index e1d42fa..e3030a3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignExceptionDispatcher.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignExceptionDispatcher.html
@@ -129,14 +129,14 @@
 
 
 
-protected ForeignExceptionDispatcher
-Subprocedure.monitor
+private ForeignExceptionDispatcher
+Procedure.monitor
 monitor to check for errors
 
 
 
-private ForeignExceptionDispatcher
-Procedure.monitor
+protected ForeignExceptionDispatcher
+Subprocedure.monitor
 monitor to check for errors
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 43f7b10..97a2309 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -148,25 +148,25 @@
 HTableDescriptor.parseFrom(byte[] bytes) 
 
 
-static ClusterId
-ClusterId.parseFrom(byte[] bytes) 
-
-
 static HColumnDescriptor
 HColumnDescriptor.parseFrom(byte[] bytes) 
 
+
+static ClusterId
+C

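(Context, not part of the diff: parseFrom is the counterpart of toByteArray, and DeserializationException is what it throws on bad input. A minimal round-trip sketch; the family name is illustrative.)

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.exceptions.DeserializationException;

    public class DescriptorRoundTrip {
      public static void main(String[] args) throws DeserializationException {
        HColumnDescriptor original = new HColumnDescriptor("cf");
        byte[] bytes = original.toByteArray();                       // serialized (pb-backed) form
        HColumnDescriptor copy = HColumnDescriptor.parseFrom(bytes); // throws DeserializationException
        System.out.println(original.equals(copy));                   // true for a faithful round trip
      }
    }
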
[33/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/coc.html
--
diff --git a/coc.html b/coc.html
index 2b26d01..a18b718 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC 
may opt to skip early
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-08
+  Last Published: 
2016-04-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index e4636b2..01ba958 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -673,7 +673,7 @@ Now your HBase server is running, start 
coding and build that next
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-08
+  Last Published: 
2016-04-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index d3406dd..9a7b0b9 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -518,7 +518,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-08
+  Last Published: 
2016-04-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index bfc8a34..d8ff328 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Reactor Dependency Convergence
 
@@ -1702,7 +1702,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-08
+  Last Published: 
2016-04-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index cfe7d50..14c0882 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Dependency Information
 
@@ -312,7 +312,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-08
+  Last Published: 
2016-04-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 23b7454..1836c4d 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependency Management
 
@@ -798,7 +798,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-08
+  Last Published: 
2016-04-11
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index d58a70b..cb0788d 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -21,7 +21,6 @@
 AbstractMultiFileWriter
 AbstractMultiFileWriter.WriterFactory
 AbstractMultiOutputCompactor
-AbstractMultiOutputCompactor.InternalScannerFactory
 AbstractPositionedByteRange
 AbstractProtobufLogWriter
 AbstractRpcClient
@@ -389,7 +388,9 @@
 CompactionTool.CompactionWorker
 Compactor
 Compactor.CellSink
+Compactor.CellSinkFactory
 Compacto

[14/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
index 8b6492f..4b5c4a1 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
@@ -259,11 +259,11 @@
 
 
 int
-Mutation.compareTo(Row d) 
+Increment.compareTo(Row i) 
 
 
 int
-Increment.compareTo(Row i) 
+Mutation.compareTo(Row d) 
 
 
 int
@@ -477,23 +477,23 @@
 
 
 void
-ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
 byte[] tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-Unsupported API
+internal method, do not use through HConnection
 
 
 
 
 void
-HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
 byte[] tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-internal method, do not use through HConnection
+Unsupported API
 
 
 
@@ -507,29 +507,29 @@
 
 
 void
-ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
 TableName tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-since 0.96 Use Table.batch(java.util.List, java.lang.Object[]) 
instead
+since 0.96 - Use Table.batch(java.util.List, java.lang.Object[]) 
instead
 
 
 
 
 void
-HConnection.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+ConnectionImplementation.processBatch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
 TableName tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Deprecated. 
-since 0.96 - Use Table.batch(java.util.List, java.lang.Object[]) 
instead
+since 0.96 Use Table.batch(java.util.List, java.lang.Object[]) 
instead
 
 
 
 
  void
-ConnectionImplementation.processBatchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
+HConnection.processBatchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List list,
 byte[] tableName,
  

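(Context, not part of the diff: the deprecation notes above point at Table.batch(List, Object[]) as the replacement for processBatch. A short sketch of that call; the table, rows, and family are made up.)

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Row;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BatchExample {
      public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = connection.getTable(TableName.valueOf("example_table"))) {
          List<Row> actions = new ArrayList<>();
          actions.add(new Put(Bytes.toBytes("row-1"))
              .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v")));
          actions.add(new Get(Bytes.toBytes("row-2")));
          Object[] results = new Object[actions.size()];
          table.batch(actions, results); // results[i] is a Result, an exception, or null
          System.out.println("batched " + results.length + " actions");
        }
      }
    }
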
[28/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 7f05906..251eab3 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -606,16 +606,6 @@ service.
 
 
 int
-CellComparator.compare(Cell a,
-  Cell b) 
-
-
-int
-CellComparator.RowComparator.compare(Cell a,
-  Cell b) 
-
-
-int
 KeyValue.MetaComparator.compare(Cell left,
   Cell right)
 Deprecated. 
@@ -640,6 +630,16 @@ service.
  
 
 
+int
+CellComparator.compare(Cell a,
+  Cell b) 
+
+
+int
+CellComparator.RowComparator.compare(Cell a,
+  Cell b) 
+
+
 private int
 CellComparator.compare(Cell a,
   Cell b,
@@ -815,37 +815,37 @@ service.
 
 
 int
+KeyValue.KVComparator.compareRows(Cell left,
+  Cell right)
+Deprecated. 
+ 
+
+
+int
 CellComparator.compareRows(Cell left,
   Cell right)
 Compares the rows of the left and right cell.
 
 
-
+
 int
 CellComparator.MetaCellComparator.compareRows(Cell left,
   Cell right) 
 
-
+
 int
-KeyValue.KVComparator.compareRows(Cell left,
-  Cell right)
+KeyValue.KVComparator.compareTimestamps(Cell left,
+  Cell right)
 Deprecated. 
  
 
-
+
 static int
 CellComparator.compareTimestamps(Cell left,
   Cell right)
 Compares cell's timestamps in DESCENDING order.
 
 
-
-int
-KeyValue.KVComparator.compareTimestamps(Cell left,
-  Cell right)
-Deprecated. 
- 
-
 
 static int
 CellComparator.compareValue(Cell cell,
@@ -1651,17 +1651,17 @@ service.
 
 
 
-Append
-Append.add(Cell cell)
-Add column and value to this Append operation.
-
-
-
 Increment
 Increment.add(Cell cell)
 Add the specified KeyValue to this operation.
 
 
+
+Append
+Append.add(Cell cell)
+Add column and value to this Append operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1750,26 +1750,26 @@ service.
 boolean partial) 
 
 
-Delete
-Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Put
 Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
+
+Delete
+Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
-Mutation
-Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
-Method for setting the put's familyMap
-
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
 Append
 Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+Mutation
+Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
+Method for setting the put's familyMap

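(Context, not part of the diff: Append and Increment are the two mutations whose add(Cell)/setFamilyCellMap overloads are reordered above. A brief usage sketch; names are illustrative.)

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Increment;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AppendIncrementExample {
      public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = connection.getTable(TableName.valueOf("example_table"))) {
          // Append bytes to an existing cell value.
          Append append = new Append(Bytes.toBytes("row-1"));
          append.add(Bytes.toBytes("cf"), Bytes.toBytes("log"), Bytes.toBytes(";next-entry"));
          table.append(append);
          // Atomically bump a counter column.
          Increment increment = new Increment(Bytes.toBytes("row-1"));
          increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("hits"), 1L);
          table.increment(increment);
        }
      }
    }
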
[32/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index ca3fc2d..db43732 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -290,6 +290,12 @@
  
 abortTask(TaskAttemptContext)
 - Method in class org.apache.hadoop.hbase.mapreduce.TableOutputCommitter
  
+abortWriter(T)
 - Method in class org.apache.hadoop.hbase.regionserver.compactions.AbstractMultiOutputCompactor
+ 
+abortWriter(T)
 - Method in class org.apache.hadoop.hbase.regionserver.compactions.Compactor
+ 
+abortWriter(StoreFile.Writer)
 - Method in class org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor
+ 
 abortWriters()
 - Method in class org.apache.hadoop.hbase.regionserver.AbstractMultiFileWriter
 
 Close all writers without throwing any exceptions.
@@ -351,8 +357,6 @@
 
 AbstractMultiOutputCompactor(Configuration,
 Store) - Constructor for class 
org.apache.hadoop.hbase.regionserver.compactions.AbstractMultiOutputCompactor
  
-AbstractMultiOutputCompactor.InternalScannerFactory - 
Interface in org.apache.hadoop.hbase.regionserver.compactions
- 
 AbstractPositionedByteRange - Class in org.apache.hadoop.hbase.util
 
 Extends the basic SimpleByteRange implementation 
with position
@@ -2653,10 +2657,6 @@
 
 appendMetaData(byte[])
 - Method in class org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper
  
-appendMetadataAndCloseWriter(StoreFile.Writer,
 Compactor.FileDetails, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.Compactor
-
-Appends the metadata and closes the writer.
-
 appendNodes(List,
 boolean, boolean) - Method in class 
org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer
  
 appendNodesToExternalList(List,
 boolean, boolean) - Method in class 
org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode
@@ -10874,12 +10874,6 @@
 Commit a merged region, moving it from the merges temporary 
directory to
  the proper location in the filesystem.
 
-commitMultiWriter(T,
 Compactor.FileDetails, CompactionRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.AbstractMultiOutputCompactor
- 
-commitMultiWriter(DateTieredMultiFileWriter,
 Compactor.FileDetails, CompactionRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactor
- 
-commitMultiWriter(StripeMultiFileWriter,
 Compactor.FileDetails, CompactionRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor
- 
 commitStoreFile(String,
 Path) - Method in class org.apache.hadoop.hbase.regionserver.HRegionFileSystem
 
 Move the file from a build/temp location to the main family 
store directory.
@@ -10898,6 +10892,14 @@
  
 committedFiles
 - Variable in class org.apache.hadoop.hbase.regionserver.HStore.StoreFlusherImpl
  
+commitWriter(T,
 Compactor.FileDetails, CompactionRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.Compactor
+ 
+commitWriter(DateTieredMultiFileWriter,
 Compactor.FileDetails, CompactionRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactor
+ 
+commitWriter(StoreFile.Writer,
 Compactor.FileDetails, CompactionRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor
+ 
+commitWriter(StripeMultiFileWriter,
 Compactor.FileDetails, CompactionRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor
+ 
 commitWriters(long,
 boolean) - Method in class org.apache.hadoop.hbase.regionserver.AbstractMultiFileWriter
 
 Commit all writers.
@@ -10956,10 +10958,12 @@
 
 compact(List,
 boolean) - Method in class 
org.apache.hadoop.hbase.mob.compactions.PartitionedMobCompactor
  
-compact(T,
 CompactionRequest, AbstractMultiOutputCompactor.InternalScannerFactory, 
ThroughputController, User) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.AbstractMultiOutputCompactor
+compact(CompactionRequest,
 ThroughputController, User) - Method in class 
org.apache.hadoop.hbase.mob.DefaultMobStoreCompactor
  
 compact(ThroughputController,
 User) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.CompactionContext
  
+compact(CompactionRequest,
 Compactor.InternalScannerFactory, Compactor.CellSinkFactory, 
ThroughputController, User) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.Compactor
+ 
 compact(CompactionRequest,
 List, ThroughputController, User) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactor
  
 compact(CompactionRequest,
 ThroughputController, User) - Method in class 
org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor
@@ -11286,7 +11290,7 @@
 
 compactOnce
 - Variable in class org.apache.hadoop.hbase.regionserver.CompactionTool.

[35/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 5202063..35a0177 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -280,10 +280,10 @@
  Warnings
  Errors
 
-1728
+1729
 0
 0
-12478
+12449
 
 Files
 
@@ -396,7 +396,7 @@
 org/apache/hadoop/hbase/HColumnDescriptor.java
 0
 0
-51
+50
 
 org/apache/hadoop/hbase/HConstants.java
 0
@@ -1031,7 +1031,7 @@
 org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
 0
 0
-24
+23
 
 org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java
 0
@@ -2926,7 +2926,7 @@
 org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
 0
 0
-63
+64
 
 org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
 0
@@ -2946,7 +2946,7 @@
 org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
 0
 0
-74
+75
 
 org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
 0
@@ -2981,7 +2981,7 @@
 org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
 0
 0
-63
+64
 
 org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
 0
@@ -2991,7 +2991,7 @@
 org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
 0
 0
-94
+95
 
 org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
 0
@@ -3006,7 +3006,7 @@
 org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
 0
 0
-108
+114
 
 org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
 0
@@ -3058,907 +3058,897 @@
 0
 1
 
-org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
-0
-0
-1
-
 org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/mob/MobFile.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/mob/MobFileCache.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/mob/MobUtils.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/mob/compactions/MobCompactor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/mob/mapreduce/SweepJob.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/mob/mapreduce/SweepReducer.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/monitoring/LogMonitoring.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/monitoring/StateDumpServlet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/monitoring/TaskMonitor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/monitoring/ThreadMonitoring.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/namespace/NamespaceTableAndRegionInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/nio/ByteBuff.java
 0
 0
 22
-
+
 org/apache/hadoop/hbase/nio/MultiByteBuff.java
 0
 0
 27
-
+
 org/apache/hadoop/hbase/nio/SingleByteBuff.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure/MasterProcedureManager.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/procedure/Procedure.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/procedure/ProcedureCoordinatorRpcs.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/procedure/ProcedureMember.java
 0
 0
 18
-
+
 org/apache/hadoop/hbase/procedure/ProcedureMemberRpcs.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure/RegionServerProcedureManager.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure/Subprocedure.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/procedure2/Procedure.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/procedure2/RootProcedureState.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure2/store/ProcedureStore.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
 0

[08/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 5e1dc46..8a99b48 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -132,41 +132,39 @@
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c) 
+QualifierFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v) 
+WhileMatchFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v) 
+RandomRowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell v) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v) 
+DependentColumnFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell v) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v) 
+FuzzyRowFilter.filterKeyValue(Cell c) 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+SingleColumnValueFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
@@ -174,69 +172,71 @@
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v) 
+FilterList.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+ColumnPrefixFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v) 
+ColumnRangeFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c) 
+ValueFilter.filterKeyValue(Cell v) 
 
 
-Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c) 
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cell v)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c) 
+MultiRowRangeFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored) 
-
-
-Filter.ReturnCode
 FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
 Deprecated. 
  
 
+
+Filter.ReturnCode
+PageFilter.filterKeyValue(Cell ignored) 
+
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+TimestampsFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored) 
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell v) 
+SkipFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell v) 
+RowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FilterWrapper.filterKeyValue(Cell v) 
+InclusiveStopFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+FilterWrapper.filterKeyValue(Cell v) 
 
 
 static Filter.ReturnCode
@@ -303,11 +303,11 @@ the order they are declared.
 
 
 Filter.ReturnCode
-VisibilityLabelFilter.filterKeyValue(Cell cell) 
+VisibilityController.DeleteVersionVisibilityExpressionFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-VisibilityController.DeleteVersionVisibilityExpressionFilter.filterKeyValue(Cell cell) 
+VisibilityLabelFilter.filterKeyValue(Cell cell) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 435b828..2075033 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -158,11 +158,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Filter
-Scan.getFilter() 
+Query.getFilter() 
 
 
 Filter
-Query.getFilter() 
+Scan.getFilter() 
 
 
 
@@ -174,19 +174,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Scan
-Scan.setFilter(Filter filter) 
-
-
 Get
 Get.setFilter(Filter filter) 
 
-
+
 Query
 Query.setFilter(Filter filter)
 Apply the specified server-side filter when performing the 
Query.
 
 
+
+Scan
+Scan.

[15/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.
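(Context, not part of the diff: filterKeyValue is the per-cell hook those ReturnCode values feed, and Scan.setFilter is how a filter is attached. A minimal custom-filter sketch; the class name and threshold are made up, and a real server-side deployment would also need the filter's serialization methods.)

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.FilterBase;

    /** Keeps only cells whose value is at least minValueLength bytes long. */
    public class MinValueLengthFilter extends FilterBase {
      private final int minValueLength;

      public MinValueLengthFilter(int minValueLength) {
        this.minValueLength = minValueLength;
      }

      @Override
      public ReturnCode filterKeyValue(Cell cell) {
        // INCLUDE keeps the cell; SKIP drops it and moves on to the next one.
        return cell.getValueLength() >= minValueLength ? ReturnCode.INCLUDE : ReturnCode.SKIP;
      }

      public static void main(String[] args) {
        Scan scan = new Scan();
        scan.setFilter(new MinValueLengthFilter(16)); // attach via Scan.setFilter, per the table above
        System.out.println(scan);
      }
    }
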

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/class-use/NonceGenerator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/NonceGenerator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/NonceGenerator.html
index a0dc6af..3d31990 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/NonceGenerator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/NonceGenerator.html
@@ -143,32 +143,32 @@
 
 
 NonceGenerator
-ConnectionImplementation.getNonceGenerator() 
+HConnection.getNonceGenerator()
+Deprecated. 
+internal method, do not use thru HConnection
+
+
 
 
 NonceGenerator
-ClusterConnection.getNonceGenerator() 
+ConnectionImplementation.getNonceGenerator() 
 
 
 NonceGenerator
-HConnection.getNonceGenerator()
-Deprecated. 
-internal method, do not use thru HConnection
-
-
+ClusterConnection.getNonceGenerator() 
 
 
 NonceGenerator
 CoprocessorHConnection.getNonceGenerator() 
 
 
-(package private) static NonceGenerator
-ConnectionImplementation.injectNonceGeneratorForTesting(ClusterConnection conn,
+static NonceGenerator
+ConnectionUtils.injectNonceGeneratorForTesting(ClusterConnection conn,
 NonceGenerator cnm) 
 
 
-static NonceGenerator
-ConnectionUtils.injectNonceGeneratorForTesting(ClusterConnection conn,
+(package private) static NonceGenerator
+ConnectionImplementation.injectNonceGeneratorForTesting(ClusterConnection conn,
 NonceGenerator cnm) 
 
 
@@ -181,13 +181,13 @@
 
 
 
-(package private) static NonceGenerator
-ConnectionImplementation.injectNonceGeneratorForTesting(ClusterConnection conn,
+static NonceGenerator
+ConnectionUtils.injectNonceGeneratorForTesting(ClusterConnection conn,
 NonceGenerator cnm) 
 
 
-static NonceGenerator
-ConnectionUtils.injectNonceGeneratorForTesting(ClusterConnection conn,
+(package private) static NonceGenerator
+ConnectionImplementation.injectNonceGeneratorForTesting(ClusterConnection conn,
 NonceGenerator cnm) 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
index 6ecd298..6bca306 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
@@ -574,11 +574,11 @@ service.
 
 
 void
-BufferedMutatorImpl.validatePut(Put put) 
+HTable.validatePut(Put put) 
 
 
 void
-HTable.validatePut(Put put) 
+BufferedMutatorImpl.validatePut(Put put) 
 
 
 static void

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
index a14014e..f9ec97c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
@@ -146,7 +146,10 @@ service.
 
 
 RegionLocator
-ConnectionImplementation.getRegionLocator(TableName tableName) 
+HConnection.getRegionLocator(TableName tableName)
+Deprecated. 
+Retrieve a RegionLocator implementation to inspect region 
information on a table.
+
 
 
 RegionLocator
@@ -156,10 +159,7 @@ service.
 
 
 RegionLocator
-HConnection.getRegionLocator(TableName tableName)
-Deprecated. 
-Retrieve a RegionLocator implementation to inspect region 
information on a table.
-
+ConnectionImplementation.getRegionLocator(TableName tableName) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index 75845f4..1fe4079 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -398,16 +398,16 @@ service.
 
 
 
-Result
-RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.call(int callTimeout) 
+Result[]
+ScannerCallable.call(int callTimeout) 
 
 
-Result[]
-ClientSmallScanner.SmallScannerCallable.call(int timeout) 
+Result
+RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.call(int callTimeout) 
 
 
 

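The RegionLocator entries above show the HConnection lookup methods carrying deprecation notes; the supported path is Connection.getRegionLocator(TableName). A minimal sketch under that assumption (table name and row key are made up):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.HRegionLocation;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.RegionLocator;
  import org.apache.hadoop.hbase.util.Bytes;

  public class RegionLocatorSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      // Ask the Connection for a RegionLocator instead of calling the
      // deprecated HConnection.getRegionLocation()/locateRegion() methods.
      try (Connection connection = ConnectionFactory.createConnection(conf);
           RegionLocator locator = connection.getRegionLocator(TableName.valueOf("my_table"))) {
        HRegionLocation location = locator.getRegionLocation(Bytes.toBytes("some-row"));
        System.out.println("Row is served by " + location.getServerName());
      }
    }
  }
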
[39/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
index 89bff8b..df88e6d 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -40,1246 +40,1197 @@
 032import 
org.apache.hadoop.hbase.io.compress.Compression;
 033import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 034import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-035import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
-036import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-037import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
-038import 
org.apache.hadoop.hbase.regionserver.BloomType;
-039import 
org.apache.hadoop.hbase.util.ByteStringer;
-040import 
org.apache.hadoop.hbase.util.Bytes;
-041import 
org.apache.hadoop.hbase.util.PrettyPrinter;
-042import 
org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
-043
-044import 
com.google.common.base.Preconditions;
-045
-046/**
-047 * An HColumnDescriptor contains 
information about a column family such as the
-048 * number of versions, compression 
settings, etc.
-049 *
-050 * It is used as input when creating a 
table or adding a column.
-051 */
-052@InterfaceAudience.Public
-053@InterfaceStability.Evolving
-054public class HColumnDescriptor implements 
Comparable {
-055  // For future backward compatibility
-056
-057  // Version  3 was when column names 
become byte arrays and when we picked up
-058  // Time-to-live feature.  Version 4 was 
when we moved to byte arrays, HBASE-82.
-059  // Version  5 was when bloom filter 
descriptors were removed.
-060  // Version  6 adds metadata as a map 
where keys and values are byte[].
-061  // Version  7 -- add new compression 
and hfile blocksize to HColumnDescriptor (HBASE-1217)
-062  // Version  8 -- reintroduction of 
bloom filters, changed from boolean to enum
-063  // Version  9 -- add data block 
encoding
-064  // Version 10 -- change metadata to 
standard type.
-065  // Version 11 -- add column family 
level configuration.
-066  private static final byte 
COLUMN_DESCRIPTOR_VERSION = (byte) 11;
-067
-068  // These constants are used as FileInfo 
keys
-069  public static final String COMPRESSION 
= "COMPRESSION";
-070  public static final String 
COMPRESSION_COMPACT = "COMPRESSION_COMPACT";
-071  public static final String 
ENCODE_ON_DISK = // To be removed, it is not used anymore
-072  "ENCODE_ON_DISK";
-073  public static final String 
DATA_BLOCK_ENCODING =
-074  "DATA_BLOCK_ENCODING";
-075  /**
-076   * Key for the BLOCKCACHE attribute.
-077   * A more exact name would be 
CACHE_DATA_ON_READ because this flag sets whether or not we
-078   * cache DATA blocks.  We always cache 
INDEX and BLOOM blocks; caching these blocks cannot be
-079   * disabled.
-080   */
-081  public static final String BLOCKCACHE = 
"BLOCKCACHE";
-082  public static final String 
CACHE_DATA_ON_WRITE = "CACHE_DATA_ON_WRITE";
-083  public static final String 
CACHE_INDEX_ON_WRITE = "CACHE_INDEX_ON_WRITE";
-084  public static final String 
CACHE_BLOOMS_ON_WRITE = "CACHE_BLOOMS_ON_WRITE";
-085  public static final String 
EVICT_BLOCKS_ON_CLOSE = "EVICT_BLOCKS_ON_CLOSE";
-086  /**
-087   * Key for cache data into L1 if cache 
is set up with more than one tier.
-088   * To set in the shell, do something 
like this:
-089   * 
hbase(main):003:0> create 't',
-090   *{NAME => 't', 
CONFIGURATION => {CACHE_DATA_IN_L1 => 'true'}}
-091   */
-092  public static final String 
CACHE_DATA_IN_L1 = "CACHE_DATA_IN_L1";
-093
-094  /**
-095   * Key for the PREFETCH_BLOCKS_ON_OPEN 
attribute.
-096   * If set, all INDEX, BLOOM, and DATA 
blocks of HFiles belonging to this
-097   * family will be loaded into the cache 
as soon as the file is opened. These
-098   * loads will not count as cache 
misses.
-099   */
-100  public static final String 
PREFETCH_BLOCKS_ON_OPEN = "PREFETCH_BLOCKS_ON_OPEN";
-101
-102  /**
-103   * Size of storefile/hfile 'blocks'.  
Default is {@link #DEFAULT_BLOCKSIZE}.
-104   * Use smaller block sizes for faster 
random-access at expense of larger
-105   * indices (more memory consumption).
-106   */
-107  public static final String BLOCKSIZE = 
"BLOCKSIZE";
-108
-109  public static final String LENGTH = 
"LENGTH";
-110  public static final String TTL = 
"TTL";
-111  public static final String BLOOMFILTER 
= "BLOOMFILTER";
-112  public static final String FOREVER = 
"FOREVER";
-113  public static final String 
REPLICATION_SCOPE = "REPLICATION_SCOPE";
-114  public static final byte[] 
REPLICATION_SCOPE_BYTES = Bytes.toBytes(REPLICATION_SCOPE);
-115  pu

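The keys walked through in the source above (BLOCKSIZE, BLOCKCACHE, CACHE_DATA_IN_L1, PREFETCH_BLOCKS_ON_OPEN, TTL) are normally set through the matching HColumnDescriptor setters rather than raw strings; a rough sketch, with family name and values chosen arbitrarily:

  import org.apache.hadoop.hbase.HColumnDescriptor;

  public class BlockCacheTuningSketch {
    public static HColumnDescriptor tunedFamily() {
      // Every setter returns the descriptor, so the cache/blocksize attributes
      // described in the source above can be configured in one chain.
      return new HColumnDescriptor("d")
          .setBlocksize(16 * 1024)          // BLOCKSIZE: smaller blocks, faster random reads
          .setBlockCacheEnabled(true)       // BLOCKCACHE: cache DATA blocks on read
          .setCacheDataInL1(true)           // CACHE_DATA_IN_L1: keep data in the L1 tier
          .setPrefetchBlocksOnOpen(true)    // PREFETCH_BLOCKS_ON_OPEN: warm the cache on open
          .setTimeToLive(7 * 24 * 60 * 60); // TTL, in seconds
    }
  }
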
[48/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
index 8c6d8d7..ce5ebf7 100644
--- a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -101,7 +101,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HColumnDescriptor
+public class HColumnDescriptor
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable
 An HColumnDescriptor contains information about a column 
family such as the
@@ -425,14 +425,6 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 compareTo(HColumnDescriptor o) 
 
 
-org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema
-convert() 
-
-
-static HColumnDescriptor
-convert(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema cfs) 
-
-
 boolean
 equals(http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object obj) 
 
@@ -804,7 +796,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION
 See Also:Constant
 Field Values
 
 
@@ -814,7 +806,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION_COMPACT
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION_COMPACT
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION_COMPACT
 See Also:Constant
 Field Values
 
 
@@ -824,7 +816,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 ENCODE_ON_DISK
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String ENCODE_ON_DISK
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String ENCODE_ON_DISK
 See Also:Constant
 Field Values
 
 
@@ -834,7 +826,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 DATA_BLOCK_ENCODING
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
 See Also:Constant
 Field Values
 
 
@@ -844,7 +836,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 BLOCKCACHE
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String BLOCKCACHE
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String BLOCKCACHE
 Key for the BLOCKCACHE attribute.
  A more exact name would be CACHE_DATA_ON_READ because this flag sets whether 
or not we
  cache DATA blocks.  We always cache INDEX and BLOOM blocks; caching these 
blocks cannot be
@@ -858,7 +850,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 CACHE_DATA_ON_WRITE
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CACHE_DATA_ON_WRITE
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CACHE_DATA_ON_WRITE
 See Also:Constant
 Field Values
 
 
@@ -868,7 +860,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 CACHE_INDEX_ON_WRITE
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CACHE_INDEX_ON_WRITE
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or inter

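With the protobuf-typed convert() overloads dropped from the method summary above, serializing a column descriptor stays on the byte[]-based pair; a small sketch assuming the usual toByteArray()/parseFrom(byte[]) round trip:

  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.exceptions.DeserializationException;

  public class ColumnDescriptorRoundTrip {
    public static void main(String[] args) throws DeserializationException {
      HColumnDescriptor original = new HColumnDescriptor("cf").setMaxVersions(3);
      // Serialize without exposing protobuf-generated types in the public API.
      byte[] bytes = original.toByteArray();
      HColumnDescriptor copy = HColumnDescriptor.parseFrom(bytes);
      System.out.println(original.equals(copy));  // true for an identical schema
    }
  }
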
[17/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
index 74fe4e6..65d17de 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
@@ -100,13 +100,13 @@
 
 
 void
-MasterServices.checkTableModifiable(TableName tableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableName tableName) 
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
+MasterServices.checkTableModifiable(TableName tableName)
+Check table is modifiable; i.e.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 3a9942d..adb575b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -171,13 +171,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-MasterServices.checkTableModifiable(TableName tableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableName tableName) 
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
+MasterServices.checkTableModifiable(TableName tableName)
+Check table is modifiable; i.e.
+
 
 
 
@@ -194,13 +194,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-DisableTableHandler
-DisableTableHandler.prepare() 
-
-
 EnableTableHandler
 EnableTableHandler.prepare() 
 
+
+DisableTableHandler
+DisableTableHandler.prepare() 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
index 5f62850..dd7e0ba 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
@@ -160,15 +160,15 @@
 
 
 boolean
-ConnectionImplementation.isMasterRunning()
+HConnection.isMasterRunning()
 Deprecated. 
-this has been deprecated without a replacement
+internal method, do not use thru HConnection
 
 
 
 
 boolean
-ClusterConnection.isMasterRunning()
+ConnectionImplementation.isMasterRunning()
 Deprecated. 
 this has been deprecated without a replacement
 
@@ -176,9 +176,9 @@
 
 
 boolean
-HConnection.isMasterRunning()
+ClusterConnection.isMasterRunning()
 Deprecated. 
-internal method, do not use thru HConnection
+this has been deprecated without a replacement
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.LimitedPrivate.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.LimitedPrivate.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.LimitedPrivate.html
index 0967cca..1c71422 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.LimitedPrivate.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.LimitedPrivate.html
@@ -1501,6 +1501,10 @@ service.
 
 
 interface 
+ReplicationPeerConfigListener 
+
+
+interface 
 WALEntryFilter
 A Filter for WAL entries before being sent over to 
replication.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
index 3c1e791..69e4174 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
@@ -5851,7 +5851,7 @@ service.
 
 
 class 
-Compactor
+Compactor
 A compactor is a compaction alg

[23/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 524c162..233b3ed 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -266,11 +266,11 @@ service.
 
 
 protected HRegionLocation
-RegionAdminServiceCallable.location 
+RegionServerCallable.location 
 
 
 protected HRegionLocation
-RegionServerCallable.location 
+RegionAdminServiceCallable.location 
 
 
 
@@ -336,27 +336,37 @@ service.
 
 
 HRegionLocation
+HConnection.getRegionLocation(byte[] tableName,
+  byte[] row,
+  boolean reload)
+Deprecated. 
+internal method, do not use through HConnection
+
+
+
+
+HRegionLocation
 ConnectionImplementation.getRegionLocation(byte[] tableName,
   byte[] row,
   boolean reload) 
 
-
+
 HRegionLocation
-HConnection.getRegionLocation(byte[] tableName,
+HConnection.getRegionLocation(TableName tableName,
   byte[] row,
   boolean reload)
 Deprecated. 
-internal method, do not use through HConnection
+internal method, do not use thru HConnection
 
 
 
-
+
 HRegionLocation
 ConnectionImplementation.getRegionLocation(TableName tableName,
   byte[] row,
   boolean reload) 
 
-
+
 HRegionLocation
 ClusterConnection.getRegionLocation(TableName tableName,
   byte[] row,
@@ -364,58 +374,57 @@ service.
 Find region location hosting passed row
 
 
+
+private HRegionLocation
+AsyncProcess.AsyncRequestFutureImpl.getReplicaLocationOrFail(Action action) 
+
 
 HRegionLocation
-HConnection.getRegionLocation(TableName tableName,
-  byte[] row,
-  boolean reload)
+HConnection.locateRegion(byte[] regionName)
 Deprecated. 
 internal method, do not use thru HConnection
 
 
 
 
-private HRegionLocation
-AsyncProcess.AsyncRequestFutureImpl.getReplicaLocationOrFail(Action action) 
-
-
 HRegionLocation
 ConnectionImplementation.locateRegion(byte[] regionName) 
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(byte[] regionName)
 Gets the location of the region of regionName.
 
 
-
+
 HRegionLocation
-HConnection.locateRegion(byte[] regionName)
+HConnection.locateRegion(byte[] tableName,
+byte[] row)
 Deprecated. 
-internal method, do not use thru HConnection
+internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ConnectionImplementation.locateRegion(byte[] tableName,
 byte[] row) 
 
-
+
 HRegionLocation
-HConnection.locateRegion(byte[] tableName,
+HConnection.locateRegion(TableName tableName,
 byte[] row)
 Deprecated. 
 internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ConnectionImplementation.locateRegion(TableName tableName,
 byte[] row) 
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(TableName tableName,
 byte[] row)
@@ -423,35 +432,35 @@ service.
  lives in.
 
 
-
+
 HRegionLocation
-HConnection.locateRegion(TableName tableName,
-byte[] row)
+HConnection.relocateRegion(byte[] tableName,
+byte[] row)
 Deprecated. 
 internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ConnectionImplementation.relocateRegion(byte[] tableName,
 byte[] row) 
 
-
+
 HRegionLocation
-HConnection.relocateRegion(byte[] tableName,
+HConnection.relocateRegion(TableName tableName,
 byte[] row)
 Deprecated. 
 internal method, do not use through HConnection
 
 
 
-
+
 HRegionLocation
 ConnectionImplementation.relocateRegion(TableName tableName,
 byte[] row) 
 
-
+
 HRegionLocation
 ClusterConnection.relocateRegion(TableName tableName,
 byte[] row)
@@ -459,15 +468,6 @@ service.
  lives in, ignoring any value that might be in the cache.
 
 
-
-HRegionLocation
-HConnection.relocateRegion(TableName tableName,
-byte[] row)
-Deprecated. 
-internal method, do not use through HConnection
-
-
-
 
 
 
@@ -508,23 +508,17 @@ service.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-ConnectionImplementation.locateRegions(byte[] tableName) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interf

[45/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index b2dbc31..8489b2a 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -101,170 +101,166 @@
 
 
 
-static HColumnDescriptor
-HColumnDescriptor.convert(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema cfs) 
-
-
 HColumnDescriptor[]
 HTableDescriptor.getColumnFamilies()
 Returns an array all the HColumnDescriptor of 
the column families
  of the table.
 
 
-
+
 HColumnDescriptor
 HTableDescriptor.getFamily(byte[] column)
 Returns the HColumnDescriptor for a specific column family 
with name as
  specified by the parameter column.
 
 
-
+
 static HColumnDescriptor
 HColumnDescriptor.parseFrom(byte[] bytes) 
 
-
+
 HColumnDescriptor
 HTableDescriptor.removeFamily(byte[] column)
 Removes the HColumnDescriptor with name specified by the 
parameter column
  from the table descriptor
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setBlockCacheEnabled(boolean blockCacheEnabled) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setBlocksize(int s) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setBloomFilterType(BloomType bt) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheBloomsOnWrite(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheDataInL1(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheDataOnWrite(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCacheIndexesOnWrite(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCompactionCompressionType(org.apache.hadoop.hbase.io.compress.Compression.Algorithm type)
 Compression types supported in hbase.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCompressionType(org.apache.hadoop.hbase.io.compress.Compression.Algorithm type)
 Compression types supported in hbase.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setCompressTags(boolean compressTags)
 Set whether the tags should be compressed along with 
DataBlockEncoding.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setConfiguration(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String value)
 Setter for storing a configuration setting in configuration
 map.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setDataBlockEncoding(DataBlockEncoding type)
 Set data block encoding algorithm used in block cache.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setDFSReplication(short replication)
 Set the replication factor to hfile(s) belonging to this 
family
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setEncryptionKey(byte[] keyBytes)
 Set the raw crypto key attribute for the family
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setEncryptionType(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String algorithm)
 Set the encryption algorithm for use with this family
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setEvictBlocksOnClose(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setInMemory(boolean inMemory) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setKeepDeletedCells(KeepDeletedCells keepDeletedCells) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMaxVersions(int maxVersions) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMinVersions(int minVersions) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMobEnabled(boolean isMobEnabled)
 Enables the mob for the family.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setMobThreshold(long threshold)
 Sets the mob threshold of the family.
 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setPrefetchBlocksOnOpen(boolean value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setScope(int scope) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setTimeToLive(int timeToLive) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setTimeToLive(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String timeToLive) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setValue(byte[] key,
 byte[] value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor.setValue(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String value) 
 
-
+
 HColumnDescriptor
 HColumnDescriptor

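Every setter in the listing above returns the descriptor itself, so storage options can be chained; a sketch combining the compression, data block encoding and MOB setters shown (family name and threshold are illustrative):

  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.io.compress.Compression;
  import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

  public class StorageOptionsSketch {
    public static HColumnDescriptor mobFamily() {
      return new HColumnDescriptor("docs")
          .setCompressionType(Compression.Algorithm.SNAPPY)  // on-disk compression
          .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF) // block encoding on disk and in cache
          .setMobEnabled(true)                               // store oversized cells as MOBs
          .setMobThreshold(1024 * 1024);                     // cells over ~1 MB go to MOB files
    }
  }
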
[46/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index b9fd2a6..2f68093 100644
--- a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -249,7 +249,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -266,7 +266,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 9ed6623..1fd6859 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1015,15 +1015,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
+
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
 Delete
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
@@ -1043,8 +1043,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Cell
-MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
+abstract Cell
+Filter.getNextCellHint(Cell currentCell)
+If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
+ the next key it must seek to.
+
 
 
 Cell
@@ -1052,40 +1055,39 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-FuzzyRowFilter.getNextCellHint(Cell currentCell) 
+MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
+FilterList.getNextCellHint(Cell currentCell) 
 
 
 Cell
-TimestampsFilter.getNextCellHint(Cell currentCell)
-Pick the next cell that the scanner should seek to.
-
+ColumnPaginationFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-FilterList.getNextCellHint(Cell currentCell) 
+FuzzyRowFilter.getNextCellHint(Cell currentCell) 
 
 
 Cell
-ColumnPaginationFilter.getNextCellHint(Cell cell) 
+ColumnRangeFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-ColumnRangeFilter.getNextCellHint(Cell cell) 
+TimestampsFilter.getNextCellHint(Cell currentCell)
+Pick the next cell that the scanner should seek to.
+
 
 
-abstract Cell
-Filter.getNextCellHint(Cell currentCell)
-If the filter returns the match code SEEK_NEXT_USING_HINT, 
then

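The getNextCellHint() entries above are the hook behind the SEEK_NEXT_USING_HINT return code: a filter that answers SEEK_NEXT_USING_HINT must also say where to seek. A rough sketch of a custom filter that skips ahead to a target row (the class name is invented, and a real filter would also need toByteArray()/parseFrom() support to run server-side):

  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.CellUtil;
  import org.apache.hadoop.hbase.KeyValueUtil;
  import org.apache.hadoop.hbase.filter.FilterBase;
  import org.apache.hadoop.hbase.util.Bytes;

  public class SkipToRowFilter extends FilterBase {
    private final byte[] targetRow;

    public SkipToRowFilter(byte[] targetRow) {
      this.targetRow = targetRow;
    }

    @Override
    public ReturnCode filterKeyValue(Cell cell) {
      // Ask the scanner to jump ahead instead of walking row by row.
      return Bytes.compareTo(CellUtil.cloneRow(cell), targetRow) < 0
          ? ReturnCode.SEEK_NEXT_USING_HINT
          : ReturnCode.INCLUDE;
    }

    @Override
    public Cell getNextCellHint(Cell currentCell) {
      // Tell the scanner which cell to seek to next: the first cell of the target row.
      return KeyValueUtil.createFirstOnRow(targetRow);
    }
  }
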
[16/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
index c51af93..e760ff6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
@@ -213,11 +213,11 @@
 
 
 protected ClusterConnection
-RpcRetryingCallerWithReadReplicas.cConnection 
+ScannerCallable.cConnection 
 
 
 protected ClusterConnection
-ScannerCallable.cConnection 
+RpcRetryingCallerWithReadReplicas.cConnection 
 
 
 (package private) ClusterConnection
@@ -229,31 +229,31 @@
 
 
 private ClusterConnection
-HRegionLocator.connection 
+ClientScanner.connection 
 
 
 protected ClusterConnection
-BufferedMutatorImpl.connection 
+HTable.connection 
 
 
-protected ClusterConnection
-RegionAdminServiceCallable.connection 
+private ClusterConnection
+HRegionLocator.connection 
 
 
-protected ClusterConnection
-AsyncProcess.connection 
+private ClusterConnection
+HBaseAdmin.connection 
 
 
 protected ClusterConnection
-HTable.connection 
+AsyncProcess.connection 
 
 
-private ClusterConnection
-ClientScanner.connection 
+protected ClusterConnection
+BufferedMutatorImpl.connection 
 
 
-private ClusterConnection
-HBaseAdmin.connection 
+protected ClusterConnection
+RegionAdminServiceCallable.connection 
 
 
 private ClusterConnection
@@ -281,17 +281,17 @@
 
 
 
-(package private) ClusterConnection
-HTableMultiplexer.getConnection() 
-
-
 ClusterConnection
 ScannerCallable.getConnection() 
 
-
+
 protected ClusterConnection
 ClientScanner.getConnection() 
 
+
+(package private) ClusterConnection
+HTableMultiplexer.getConnection() 
+
 
 static ClusterConnection
 CoprocessorHConnection.getConnectionForEnvironment(CoprocessorEnvironment env)
@@ -341,13 +341,13 @@
 int replicaId) 
 
 
-(package private) static NonceGenerator
-ConnectionImplementation.injectNonceGeneratorForTesting(ClusterConnection conn,
+static NonceGenerator
+ConnectionUtils.injectNonceGeneratorForTesting(ClusterConnection conn,
 NonceGenerator cnm) 
 
 
-static NonceGenerator
-ConnectionUtils.injectNonceGeneratorForTesting(ClusterConnection conn,
+(package private) static NonceGenerator
+ConnectionImplementation.injectNonceGeneratorForTesting(ClusterConnection conn,
 NonceGenerator cnm) 
 
 
@@ -641,15 +641,15 @@
 
 
 private ClusterConnection
-RegionCoprocessorRpcChannel.connection 
+RegionServerCoprocessorRpcChannel.connection 
 
 
 private ClusterConnection
-MasterCoprocessorRpcChannel.connection 
+RegionCoprocessorRpcChannel.connection 
 
 
 private ClusterConnection
-RegionServerCoprocessorRpcChannel.connection 
+MasterCoprocessorRpcChannel.connection 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
index c5b154f..d4b63e0 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
@@ -898,11 +898,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private Connection
-TableInputFormatBase.connection 
+HRegionPartitioner.connection 
 
 
 private Connection
-HRegionPartitioner.connection 
+TableInputFormatBase.connection 
 
 
 
@@ -936,22 +936,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private Connection
-TableOutputFormat.TableRecordWriter.connection 
+HRegionPartitioner.connection 
 
 
 private Connection
-TableInputFormatBase.connection
-The underlying Connection 
of the table.
-
+TableOutputFormat.TableRecordWriter.connection 
 
 
-private Connection
-HRegionPartitioner.connection 
-
-
 (package private) Connection
 MultiTableOutputFormat.MultiTableRecordWriter.connection 
 
+
+private Connection
+TableInputFormatBase.connection
+The underlying Connection 
of the table.
+
+
 
 (package private) Connection
 SyncTable.SyncMapper.sourceConnection 
@@ -1060,11 +1060,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-RegionPlacementMaintainer.connection 
+CatalogJanitor.connection 
 
 
 private Connection
-CatalogJanitor.connection 
+RegionPlacementMaintainer.connection 
 
 
 private Connection

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apac

[31/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
index c282081..0d729c1 100644
--- a/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -101,7 +101,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HColumnDescriptor
+public class HColumnDescriptor
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable
 An HColumnDescriptor contains information about a column 
family such as the
@@ -424,31 +424,21 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 Constructors 
 
-Modifier
-Constructor and Description
+Constructor and Description
 
 
-private 
-HColumnDescriptor()
-Default constructor.
-
-
-
- 
-HColumnDescriptor(byte[] familyName)
+HColumnDescriptor(byte[] familyName)
 Construct a column descriptor specifying only the family 
name
  The other attributes are defaulted.
 
 
-
- 
-HColumnDescriptor(HColumnDescriptor desc)
+
+HColumnDescriptor(HColumnDescriptor desc)
 Constructor.
 
 
-
- 
-HColumnDescriptor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String familyName)
+
+HColumnDescriptor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String familyName)
 Construct a column descriptor specifying only the family 
name
  The other attributes are defaulted.
 
@@ -473,14 +463,6 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 compareTo(HColumnDescriptor o) 
 
 
-org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema
-convert() 
-
-
-static HColumnDescriptor
-convert(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema cfs) 
-
-
 boolean
 equals(http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object obj) 
 
@@ -861,7 +843,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COLUMN_DESCRIPTOR_VERSION
-private static final byte COLUMN_DESCRIPTOR_VERSION
+private static final byte COLUMN_DESCRIPTOR_VERSION
 See Also:Constant
 Field Values
 
 
@@ -871,7 +853,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION
 See Also:Constant
 Field Values
 
 
@@ -881,7 +863,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION_COMPACT
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION_COMPACT
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPRESSION_COMPACT
 See Also:Constant
 Field Values
 
 
@@ -891,7 +873,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 ENCODE_ON_DISK
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String ENCODE_ON_DISK
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String ENCODE_ON_DISK
 See Also:Constant
 Field Values
 
 
@@ -901,7 +883,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 DATA_BLOCK_ENCODING
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
 See Also:Constant
 Field Values
 
 
@@ -911,7 +893,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 BLOCKCACHE
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String BLOCKCACHE
+public static final http://docs.oracl

[38/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
index b3e3bcc..ba29d83 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -49,1595 +49,1544 @@
 041import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
 042import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
 043import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-044import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
-045import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-046import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
-047import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
-048import 
org.apache.hadoop.hbase.regionserver.BloomType;
-049import 
org.apache.hadoop.hbase.security.User;
-050import 
org.apache.hadoop.hbase.util.ByteStringer;
-051import 
org.apache.hadoop.hbase.util.Bytes;
-052
-053/**
-054 * HTableDescriptor contains the details 
about an HBase table  such as the descriptors of
-055 * all the column families, is the table 
a catalog table,  -ROOT-  or
-056 *  hbase:meta , 
if the table is read only, the maximum size of the memstore,
-057 * when the region split should occur, 
coprocessors associated with it etc...
-058 */
-059@InterfaceAudience.Public
-060@InterfaceStability.Evolving
-061public class HTableDescriptor implements 
Comparable {
+044import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
+045import 
org.apache.hadoop.hbase.regionserver.BloomType;
+046import 
org.apache.hadoop.hbase.security.User;
+047import 
org.apache.hadoop.hbase.util.Bytes;
+048
+049/**
+050 * HTableDescriptor contains the details 
about an HBase table  such as the descriptors of
+051 * all the column families, is the table 
a catalog table,  -ROOT-  or
+052 *  hbase:meta , 
if the table is read only, the maximum size of the memstore,
+053 * when the region split should occur, 
coprocessors associated with it etc...
+054 */
+055@InterfaceAudience.Public
+056@InterfaceStability.Evolving
+057public class HTableDescriptor implements 
Comparable {
+058
+059  private static final Log LOG = 
LogFactory.getLog(HTableDescriptor.class);
+060
+061  private TableName name = null;
 062
-063  private static final Log LOG = 
LogFactory.getLog(HTableDescriptor.class);
-064
-065  private TableName name = null;
-066
-067  /**
-068   * A map which holds the metadata 
information of the table. This metadata
-069   * includes values like IS_ROOT, 
IS_META, DEFERRED_LOG_FLUSH, SPLIT_POLICY,
-070   * MAX_FILE_SIZE, READONLY, 
MEMSTORE_FLUSHSIZE etc...
-071   */
-072  private final Map 
values =
-073  new HashMap();
-074
-075  /**
-076   * A map which holds the configuration 
specific to the table.
-077   * The keys of the map have the same 
names as config keys and override the defaults with
-078   * table-specific settings. Example 
usage may be for compactions, etc.
-079   */
-080  private final Map 
configuration = new HashMap();
-081
-082  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
-083
-084  /**
-085   * INTERNAL Used 
by HBase Shell interface to access this metadata
-086   * attribute which denotes the maximum 
size of the store file after which
-087   * a region split occurs
-088   *
-089   * @see #getMaxFileSize()
-090   */
-091  public static final String MAX_FILESIZE 
= "MAX_FILESIZE";
-092  private static final Bytes 
MAX_FILESIZE_KEY =
-093  new 
Bytes(Bytes.toBytes(MAX_FILESIZE));
+063  /**
+064   * A map which holds the metadata 
information of the table. This metadata
+065   * includes values like IS_ROOT, 
IS_META, DEFERRED_LOG_FLUSH, SPLIT_POLICY,
+066   * MAX_FILE_SIZE, READONLY, 
MEMSTORE_FLUSHSIZE etc...
+067   */
+068  private final Map 
values =
+069  new HashMap();
+070
+071  /**
+072   * A map which holds the configuration 
specific to the table.
+073   * The keys of the map have the same 
names as config keys and override the defaults with
+074   * table-specific settings. Example 
usage may be for compactions, etc.
+075   */
+076  private final Map 
configuration = new HashMap();
+077
+078  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
+079
+080  /**
+081   * INTERNAL Used 
by HBase Shell interface to access this metadata
+082   * attribute which denotes the maximum 
size of the store file after which
+083   * a region split occurs
+084   *
+085   * @see #

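The class comment above describes HTableDescriptor as the bundle of column family descriptors and table-level settings handed to table creation; a minimal sketch of that flow (table and family names are invented, and a running cluster is assumed):

  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.HTableDescriptor;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;

  public class CreateTableSketch {
    public static void main(String[] args) throws Exception {
      HTableDescriptor table = new HTableDescriptor(TableName.valueOf("events"));
      table.addFamily(new HColumnDescriptor("d").setMaxVersions(1));
      table.addFamily(new HColumnDescriptor("meta"));

      try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
           Admin admin = connection.getAdmin()) {
        admin.createTable(table);  // ships the descriptor to the master
      }
    }
  }
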
[44/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index 3018db3..6fa9423 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -290,17 +290,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
-  org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
-
-
-org.apache.hadoop.mapred.RecordReader
 TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
   
org.apache.hadoop.mapred.Reporter reporter) 
 
-
+
 org.apache.hadoop.mapred.RecordReader
 TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
@@ -308,6 +302,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Builds a TableRecordReader.
 
 
+
+org.apache.hadoop.mapred.RecordReader
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+  org.apache.hadoop.mapred.JobConf job,
+  
org.apache.hadoop.mapred.Reporter reporter) 
+
 
 
 
@@ -325,20 +325,20 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
+GroupingTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
+Extract the grouping columns from value to construct a new 
key.
 
 
 
 void
-GroupingTableMap.map(ImmutableBytesWritable key,
+IdentityTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to reduce
 
 
 
@@ -362,20 +362,20 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
+GroupingTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
+Extract the grouping columns from value to construct a new 
key.
 
 
 
 void
-GroupingTableMap.map(ImmutableBytesWritable key,
+IdentityTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to reduce
 
 
 
@@ -415,16 +415,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReader
-TableSnapshotInputFormat.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
-
org.apache.hadoop.mapreduce.TaskAttemptContext context) 
-
-
-org.apache.hadoop.mapreduce.RecordReader
 MultiTableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
 
org.apache.hadoop.mapreduce.TaskAttemptContext context)
 Builds a TableRecordReader.
 
 
+
+org.apache.hadoop.mapreduce.RecordReader
+TableSnapshotInputFormat.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
+
org.apache.hadoop.mapreduce.TaskAttemptContext context) 
+
 
 org.apache.hadoop.mapreduce.RecordReader
 TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
@@ -449,18 +449,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-GroupingTableMapper.map(ImmutableBytesWritable key,
+IdentityTableMapper.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapreduce.Mapper.Context context)
-Extract the grouping columns from value to construct a new 
key.
+Pas

[47/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
index 496c1e4..c0af091 100644
--- a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -105,7 +105,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HTableDescriptor
+public class HTableDescriptor
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable
 HTableDescriptor contains the details about an HBase table  
such as the descriptors of
@@ -405,14 +405,6 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema
-convert() 
-
-
-static HTableDescriptor
-convert(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema ts) 
-
-
 boolean
 equals(http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object obj)
 Compare the contents of the descriptor with another one 
passed as a parameter.
@@ -830,7 +822,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 SPLIT_POLICY
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SPLIT_POLICY
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String SPLIT_POLICY
 See Also:Constant
 Field Values
 
 
@@ -840,7 +832,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 MAX_FILESIZE
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MAX_FILESIZE
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MAX_FILESIZE
 INTERNAL Used by HBase Shell interface to access 
this metadata
  attribute which denotes the maximum size of the store file after which
  a region split occurs
@@ -854,7 +846,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 OWNER
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String OWNER
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String OWNER
 See Also:Constant
 Field Values
 
 
@@ -864,7 +856,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 OWNER_KEY
-public static final Bytes OWNER_KEY
+public static final Bytes OWNER_KEY
 
 
 
@@ -873,7 +865,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 READONLY
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String READONLY
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String READONLY
 INTERNAL Used by rest interface to access this 
metadata
  attribute which denotes if the table is Read Only
 See Also:isReadOnly(),
 
@@ -886,7 +878,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPACTION_ENABLED
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPACTION_ENABLED
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String COMPACTION_ENABLED
 INTERNAL Used by HBase Shell interface to access 
this metadata
  attribute which denotes if the table is compaction enabled
 See Also:isCompactionEnabled(),
 
@@ -899,7 +891,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 MEMSTORE_FLUSHSIZE
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MEMSTORE_FLUSHSIZE
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MEMSTORE_FLUSHSIZE
 INTERNAL Used by HBase Shell interface to access 
this metadata
  attribute which represents the maximum size of the mems

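The MAX_FILESIZE, READONLY, COMPACTION_ENABLED and MEMSTORE_FLUSHSIZE attributes above are normally driven through the corresponding HTableDescriptor setters rather than raw key/value pairs; a rough sketch with illustrative sizes:

  import org.apache.hadoop.hbase.HTableDescriptor;
  import org.apache.hadoop.hbase.TableName;

  public class TableTuningSketch {
    public static HTableDescriptor tunedTable() {
      HTableDescriptor table = new HTableDescriptor(TableName.valueOf("events"));
      table.setMaxFileSize(10L * 1024 * 1024 * 1024);  // MAX_FILESIZE: request a split past ~10 GB
      table.setMemStoreFlushSize(256L * 1024 * 1024);  // MEMSTORE_FLUSHSIZE: flush at ~256 MB
      table.setCompactionEnabled(true);                // COMPACTION_ENABLED
      table.setReadOnly(false);                        // READONLY
      return table;
    }
  }
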
[36/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/book.html
--
diff --git a/book.html b/book.html
index 6e780e7..0ab20df 100644
--- a/book.html
+++ b/book.html
@@ -1655,7 +1655,7 @@ HBase 0.98 drops support for Hadoop 1.0, deprecates use 
of Hadoop 1.1+, and HBas
 S
 NT
 NT
-NT
+X
 
 
 Hadoop-2.3.x
@@ -1663,7 +1663,7 @@ HBase 0.98 drops support for Hadoop 1.0, deprecates use 
of Hadoop 1.1+, and HBas
 S
 NT
 NT
-NT
+X
 
 
 Hadoop-2.4.x
@@ -33282,7 +33282,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 2.0.0-SNAPSHOT
-Last updated 2016-04-08 14:30:12 UTC
+Last updated 2016-04-11 14:30:43 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index 06a28d5..4f3257b 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -305,7 +305,7 @@ under the License. -->
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-08
+  Last Published: 
2016-04-11
 
 
 



[27/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 69864c1..a0e381f 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -234,16 +234,16 @@
 
 
 int
-BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparator comparator,
-Cell key) 
-
-
-int
 DataBlockEncoder.EncodedSeeker.compareKey(CellComparator comparator,
 Cell key)
 Compare the given key against the current key
 
 
+
+int
+BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparator comparator,
+Cell key) 
+
 
 DataBlockEncoder.EncodedSeeker
 CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
@@ -299,30 +299,30 @@
 
 
 protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
+CompoundBloomFilterBase.comparator
+Comparator used to compare Bloom filter keys
 
 
 
 protected CellComparator
-HFile.WriterFactory.comparator 
+HFileWriterImpl.comparator
+Key comparator.
+
 
 
 private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+HFileReaderImpl.comparator
+Key comparator
 
 
 
 protected CellComparator
-CompoundBloomFilterBase.comparator
-Comparator used to compare Bloom filter keys
-
+HFile.WriterFactory.comparator 
 
 
 private CellComparator
-HFileReaderImpl.comparator
-Key comparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
@@ -344,11 +344,11 @@
 
 
 CellComparator
-HFile.Reader.getComparator() 
+HFileReaderImpl.getComparator() 
 
 
 CellComparator
-HFileReaderImpl.getComparator() 
+HFile.Reader.getComparator() 
 
 
 
@@ -499,12 +499,20 @@
 StripeStoreFileManager.cellComparator 
 
 
+protected CellComparator
+StripeStoreFlusher.StripeFlushRequest.comparator 
+
+
 private CellComparator
-StoreFile.WriterBuilder.comparator 
+Segment.comparator 
+
+
+protected CellComparator
+HRegion.RegionScannerImpl.comparator 
 
 
 private CellComparator
-AbstractMemStore.comparator 
+ScanInfo.comparator 
 
 
 protected CellComparator
@@ -512,32 +520,24 @@
 
 
 private CellComparator
-Segment.comparator 
+StoreFile.WriterBuilder.comparator 
 
 
 private CellComparator
-ScanInfo.comparator 
+AbstractMemStore.comparator 
 
 
 private CellComparator
 HStore.comparator 
 
 
-protected CellComparator
-HRegion.RegionScannerImpl.comparator 
+private CellComparator
+DefaultStoreFileManager.kvComparator 
 
 
 protected CellComparator
-StripeStoreFlusher.StripeFlushRequest.comparator 
-
-
-protected CellComparator
 KeyValueHeap.KVScannerComparator.kvComparator 
 
-
-private CellComparator
-DefaultStoreFileManager.kvComparator 
-
 
 private CellComparator
 ScanQueryMatcher.rowComparator
@@ -565,11 +565,11 @@
 
 
 CellComparator
-StoreFile.Reader.getComparator() 
+Store.getComparator() 
 
 
-protected CellComparator
-AbstractMemStore.getComparator() 
+(package private) CellComparator
+StoreFileScanner.getComparator() 
 
 
 protected CellComparator
@@ -583,19 +583,19 @@
 
 
 CellComparator
-Store.getComparator() 
+StoreFile.Reader.getComparator() 
 
 
-CellComparator
-KeyValueHeap.KVScannerComparator.getComparator() 
+protected CellComparator
+AbstractMemStore.getComparator() 
 
 
-(package private) CellComparator
-StoreFileScanner.getComparator() 
+CellComparator
+HStore.getComparator() 
 
 
 CellComparator
-HStore.getComparator() 
+KeyValueHeap.KVScannerComparator.getComparator() 
 
 
 
@@ -629,12 +629,6 @@
 
 
 
-protected void
-DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
-Store store,
-CellComparator kvComparator) 
-
-
 protected abstract void
 StoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
@@ -642,17 +636,23 @@
 Create the StoreEngine's components.
 
 
+
+protected void
+StripeStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+Store store,
+CellComparator comparator) 
+
 
 protected void
-DateTieredStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
 CellComparator kvComparator) 
 
 
 protected void
-StripeStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+DateTieredStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
-Ce

[13/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index 95b6ca5..1224f44 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -101,7 +101,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class ReplicationAdmin
+public class ReplicationAdmin
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 
@@ -352,18 +352,22 @@ implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.h
 
 
 void
+peerAdded(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id) 
+
+
+void
 removePeer(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id)
 Removes a peer cluster and stops the replication to 
it.
 
 
-
+
 void
 removePeerTableCFs(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
 http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapCollectionString>> tableCfs)
 Remove some table-cfs from config of the specified 
peer
 
 
-
+
 void
 removePeerTableCFs(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String tableCf)
@@ -373,20 +377,25 @@ implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.h
 
 
 
-
+
 void
 setPeerTableCFs(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
   http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapCollectionString>> tableCfs)
 Set the replicable table-cf config of the specified 
peer
 
 
-
+
 private void
 setTableRep(TableName tableName,
   boolean isRepEnabled)
 Set the table's replication switch if the table's 
replication switch is already not set.
 
 
+
+void
+updatePeerConfig(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
+ReplicationPeerConfig peerConfig) 
+
 
 
 
@@ -415,7 +424,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.h
 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -424,7 +433,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.h
 
 
 TNAME
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TNAME
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String TNAME
 See Also:Constant
 Field Values
 
 
@@ -434,7 +443,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.h
 
 
 CFNAME
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CFNAME
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CFNAME
 See Also:Constant
 Field Values
 
 
@@ -444,7 +453,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.h
 
 
 REPLICATIONTYPE
-public static final http://docs.oracle.com/java
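
The regenerated ReplicationAdmin javadoc above now lists two additional methods, peerAdded(String) and updatePeerConfig(String, ReplicationPeerConfig). As a rough usage sketch of the latter (the peer id and cluster key below are made-up values for illustration, not taken from the commit, and error handling is elided):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
    import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

    public class UpdatePeerConfigSketch {
      public static void main(String[] args) throws Exception {
        // ReplicationAdmin implements Closeable (see the class declaration above),
        // so try-with-resources releases the underlying connection.
        try (ReplicationAdmin admin = new ReplicationAdmin(HBaseConfiguration.create())) {
          ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
          // Hypothetical peer cluster key: ZK quorum, client port, znode parent.
          peerConfig.setClusterKey("zk1.example.com,zk2.example.com,zk3.example.com:2181:/hbase");
          // Push the updated configuration to the already-registered peer "1".
          admin.updatePeerConfig("1", peerConfig);
        }
      }
    }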

[50/51] [partial] hbase-site git commit: Published site at a395922ad5af9494bb55feee3c275c6d3a575e92.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16980207/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index f9bb3c1..0586619 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20160408145532+00'00')
-/ModDate (D:20160408145532+00'00')
+/CreationDate (D:20160411145844+00'00')
+/ModDate (D:20160411145844+00'00')
 >>
 endobj
 2 0 obj
@@ -99367,7 +99367,7 @@ endobj
 >>
 endobj
 141 0 obj
-<< /Length 47374
+<< /Length 47370
 >>
 stream
 q
@@ -101270,7 +101270,7 @@ S
 BT
 463.656 445.833 Td
 /F1.0 10.5 Tf
-<4e54> Tj
+<58> Tj
 ET
 
 0.000 0.000 0.000 scn
@@ -101480,7 +101480,7 @@ S
 BT
 463.656 422.553005 Td
 /F1.0 10.5 Tf
-<4e54> Tj
+<58> Tj
 ET
 
 0.000 0.000 0.000 scn
@@ -817141,4060 +817141,4060 @@ xref
[~4,060 updated PDF xref byte-offset entries elided; the regenerated reference guide shifted every object offset in this table.]

hbase git commit: HBASE-14985 TimeRange constructors should set allTime when appropriate (Geoffrey Jacoby)

Repository: hbase
Updated Branches:
  refs/heads/master 896457339 -> ff9c92e16


HBASE-14985 TimeRange constructors should set allTime when appropriate 
(Geoffrey Jacoby)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ff9c92e1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ff9c92e1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ff9c92e1

Branch: refs/heads/master
Commit: ff9c92e16831fe350904ac99f92619fb97ba2bef
Parents: 8964573
Author: tedyu 
Authored: Mon Apr 11 16:59:45 2016 -0700
Committer: tedyu 
Committed: Mon Apr 11 16:59:45 2016 -0700

--
 .../org/apache/hadoop/hbase/io/TimeRange.java   | 12 +--
 .../regionserver/TestTimeRangeTracker.java  | 37 
 2 files changed, 47 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ff9c92e1/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
index a300c21..212ad45 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
@@ -36,8 +36,10 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class TimeRange {
-  private long minStamp = 0L;
-  private long maxStamp = Long.MAX_VALUE;
+  private static final long MIN_TIME = 0L;
+  private static final long MAX_TIME = Long.MAX_VALUE;
+  private long minStamp = MIN_TIME;
+  private long maxStamp = MAX_TIME;
   private boolean allTime = false;
 
   /**
@@ -54,6 +56,9 @@ public class TimeRange {
*/
   public TimeRange(long minStamp) {
 this.minStamp = minStamp;
+if (this.minStamp == MIN_TIME){
+  this.allTime = true;
+}
   }
 
   /**
@@ -80,6 +85,9 @@ public class TimeRange {
 }
 this.minStamp = minStamp;
 this.maxStamp = maxStamp;
+if (this.minStamp == MIN_TIME && this.maxStamp == MAX_TIME){
+  this.allTime = true;
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/ff9c92e1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
index edec023..9d49e61 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.hbase.io.TimeRange;
@@ -25,6 +27,8 @@ import 
org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.io.IOException;
+
 @Category({RegionServerTests.class, SmallTests.class})
 public class TestTimeRangeTracker {
   @Test
@@ -81,6 +85,39 @@ public class TestTimeRangeTracker {
 assertTrue(trr.getMinimumTimestamp() == 0);
   }
 
+  @Test
+  public void testRangeConstruction() throws IOException {
+TimeRange defaultRange = new TimeRange();
+assertEquals(0L, defaultRange.getMin());
+assertEquals(Long.MAX_VALUE, defaultRange.getMax());
+assertTrue(defaultRange.isAllTime());
+
+TimeRange oneArgRange = new TimeRange(0L);
+assertEquals(0L, oneArgRange.getMin());
+assertEquals(Long.MAX_VALUE, oneArgRange.getMax());
+assertTrue(oneArgRange.isAllTime());
+
+TimeRange oneArgRange2 = new TimeRange(1);
+assertEquals(1, oneArgRange2.getMin());
+assertEquals(Long.MAX_VALUE, oneArgRange2.getMax());
+assertFalse(oneArgRange2.isAllTime());
+
+TimeRange twoArgRange = new TimeRange(0L, Long.MAX_VALUE);
+assertEquals(0L, twoArgRange.getMin());
+assertEquals(Long.MAX_VALUE, twoArgRange.getMax());
+assertTrue(twoArgRange.isAllTime());
+
+TimeRange twoArgRange2 = new TimeRange(0L, Long.MAX_VALUE - 1);
+assertEquals(0L, twoArgRange2.getMin());
+assertEquals(Long.MAX_VALUE - 1, twoArgRange2.getMax());
+assertFalse(twoArgRange2.isAllTime());
+
+TimeRange twoArgRange3 = new TimeRange(1, Long.MAX_VALUE);
+assertEquals(1, twoArgRange3.getMin());
+assertEquals(Long.MAX_VALUE, twoArgRange3.getMax());
+assertFalse(twoArgRange3.isAllTime());
+  }
+
   /**
   

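To make the effect of this change concrete, here is a minimal stand-alone sketch (not the HBase TimeRange class itself; names mirror it for readability) of why flagging the unbounded range matters: callers that consult isAllTime() can skip per-timestamp comparisons entirely. The same change lands on branch-1 in the commit below.

    public final class TimeRangeSketch {
      private static final long MIN_TIME = 0L;
      private static final long MAX_TIME = Long.MAX_VALUE;

      private final long minStamp;
      private final long maxStamp;
      private final boolean allTime;

      public TimeRangeSketch(long minStamp, long maxStamp) {
        this.minStamp = minStamp;
        this.maxStamp = maxStamp;
        // The gist of HBASE-14985: derive allTime from the arguments instead of
        // leaving it false when a caller passes the unbounded range explicitly.
        this.allTime = (minStamp == MIN_TIME && maxStamp == MAX_TIME);
      }

      public boolean isAllTime() {
        return allTime;
      }

      public boolean withinTimeRange(long timestamp) {
        // Fast path: an all-time range accepts any timestamp without comparing.
        if (allTime) {
          return true;
        }
        // Half-open interval [minStamp, maxStamp), matching the class javadoc.
        return minStamp <= timestamp && timestamp < maxStamp;
      }

      public static void main(String[] args) {
        // Before the patch, an explicit (0, Long.MAX_VALUE) range reported
        // isAllTime() == false and always took the comparison path.
        TimeRangeSketch explicit = new TimeRangeSketch(0L, Long.MAX_VALUE);
        System.out.println(explicit.isAllTime());          // true after the fix
        System.out.println(explicit.withinTimeRange(42L)); // true either way
      }
    }
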
hbase git commit: HBASE-14985 TimeRange constructors should set allTime when appropriate (Geoffrey Jacoby)

Repository: hbase
Updated Branches:
  refs/heads/branch-1 2dbbe8960 -> e31361dcb


HBASE-14985 TimeRange constructors should set allTime when appropriate 
(Geoffrey Jacoby)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e31361dc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e31361dc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e31361dc

Branch: refs/heads/branch-1
Commit: e31361dcb8dc6bd611b869cae0f39211619be3db
Parents: 2dbbe89
Author: tedyu 
Authored: Mon Apr 11 17:58:38 2016 -0700
Committer: tedyu 
Committed: Mon Apr 11 17:58:38 2016 -0700

--
 .../org/apache/hadoop/hbase/io/TimeRange.java   | 12 +--
 .../regionserver/TestTimeRangeTracker.java  | 37 
 2 files changed, 47 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e31361dc/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
index 672cc9d..4ec062d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
@@ -36,8 +36,10 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class TimeRange {
-  private long minStamp = 0L;
-  private long maxStamp = Long.MAX_VALUE;
+  private static final long MIN_TIME = 0L;
+  private static final long MAX_TIME = Long.MAX_VALUE;
+  private long minStamp = MIN_TIME;
+  private long maxStamp = MAX_TIME;
   private boolean allTime = false;
 
   /**
@@ -58,6 +60,9 @@ public class TimeRange {
   @Deprecated
   public TimeRange(long minStamp) {
 this.minStamp = minStamp;
+if (this.minStamp == MIN_TIME){
+  this.allTime = true;
+}
   }
 
   /**
@@ -89,6 +94,9 @@ public class TimeRange {
 }
 this.minStamp = minStamp;
 this.maxStamp = maxStamp;
+if (this.minStamp == MIN_TIME && this.maxStamp == MAX_TIME){
+  this.allTime = true;
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/e31361dc/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
index 85fb5dc..963a9e4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
@@ -17,8 +17,12 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
+
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
@@ -80,6 +84,39 @@ public class TestTimeRangeTracker {
 assertTrue(trr.getMinimumTimestamp() == 0);
   }
 
+  @Test
+  public void testRangeConstruction() throws IOException {
+TimeRange defaultRange = new TimeRange();
+assertEquals(0L, defaultRange.getMin());
+assertEquals(Long.MAX_VALUE, defaultRange.getMax());
+assertTrue(defaultRange.isAllTime());
+
+TimeRange oneArgRange = new TimeRange(0L);
+assertEquals(0L, oneArgRange.getMin());
+assertEquals(Long.MAX_VALUE, oneArgRange.getMax());
+assertTrue(oneArgRange.isAllTime());
+
+TimeRange oneArgRange2 = new TimeRange(1);
+assertEquals(1, oneArgRange2.getMin());
+assertEquals(Long.MAX_VALUE, oneArgRange2.getMax());
+assertFalse(oneArgRange2.isAllTime());
+
+TimeRange twoArgRange = new TimeRange(0L, Long.MAX_VALUE);
+assertEquals(0L, twoArgRange.getMin());
+assertEquals(Long.MAX_VALUE, twoArgRange.getMax());
+assertTrue(twoArgRange.isAllTime());
+
+TimeRange twoArgRange2 = new TimeRange(0L, Long.MAX_VALUE - 1);
+assertEquals(0L, twoArgRange2.getMin());
+assertEquals(Long.MAX_VALUE - 1, twoArgRange2.getMax());
+assertFalse(twoArgRange2.isAllTime());
+
+TimeRange twoArgRange3 = new TimeRange(1, Long.MAX_VALUE);
+assertEquals(1, twoArgRange3.getMin());
+assertEquals(Long.MAX_VALUE, twoArgRange3.getMax());
+assertFalse(twoArgRange3.isAllTime());
+  }
+
   /**
* Bit of code to test concurrent access on this class.
* @param args