hbase git commit: HBASE-19205 Backport HBASE-18441 ZookeeperWatcher#interruptedException should throw exception

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 65b23613b -> 557194830


HBASE-19205 Backport HBASE-18441 ZookeeperWatcher#interruptedException should throw exception


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/55719483
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/55719483
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/55719483

Branch: refs/heads/branch-1.3
Commit: 557194830654842379b1ce82942ff74ea3615bb5
Parents: 65b2361
Author: stack 
Authored: Tue Nov 7 21:10:42 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 7 21:13:04 2017 -0800

--
 .../hbase/zookeeper/ZooKeeperWatcher.java   | 30 
 .../hadoop/hbase/zookeeper/TestZKUtil.java  | 23 +++
 .../hbase/zookeeper/ZooKeeperMainServer.java|  5 ++--
 3 files changed, 44 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/55719483/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index f5fa0b7..cb54cec 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -244,7 +244,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
     } catch(KeeperException.NoNodeException nne) {
       return;
     } catch(InterruptedException ie) {
-      interruptedException(ie);
+      interruptedExceptionNoThrow(ie, false);
     } catch (IOException|KeeperException e) {
       LOG.warn("Received exception while checking and setting zookeeper ACLs", e);
     }
@@ -722,20 +722,26 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
 
   /**
    * Handles InterruptedExceptions in client calls.
-   * <p>
-   * This may be temporary but for now this gives one place to deal with these.
-   * <p>
-   * TODO: Currently, this method does nothing.
-   *   Is this ever expected to happen?  Do we abort or can we let it run?
-   *   Maybe this should be logged as WARN?  It shouldn't happen?
-   * <p>
-   * @param ie
+   * @param ie the InterruptedException instance thrown
+   * @throws KeeperException the exception to throw, transformed from the InterruptedException
+   */
+  public void interruptedException(InterruptedException ie) throws KeeperException {
+    interruptedExceptionNoThrow(ie, true);
+    // Throw a system error exception to let the upper level handle it
+    throw new KeeperException.SystemErrorException();
+  }
+
+  /**
+   * Log the InterruptedException and interrupt the current thread
+   * @param ie the InterruptedException to log
+   * @param throwLater whether we will throw the exception later
    */
-  public void interruptedException(InterruptedException ie) {
-    LOG.debug(prefix("Received InterruptedException, doing nothing here"), ie);
+  public void interruptedExceptionNoThrow(InterruptedException ie, boolean throwLater) {
+    LOG.debug(prefix("Received InterruptedException, will interrupt current thread"
+        + (throwLater ? " and rethrow a SystemErrorException" : "")),
+      ie);
     // At least preserve the interrupt.
     Thread.currentThread().interrupt();
-    // no-op
   }
 
   /**
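With this change an interrupt during a client ZooKeeper call surfaces as a checked KeeperException instead of being swallowed. A minimal caller-side sketch of the new contract (the readZnode helper below is illustrative only, not part of this commit; it assumes the existing ZKUtil.getData API):

    // Illustrative sketch, not committed code: a caller after this change.
    byte[] readZnode(ZooKeeperWatcher watcher, String znode) throws KeeperException {
      try {
        return ZKUtil.getData(watcher, znode);
      } catch (InterruptedException ie) {
        // Logs, restores the interrupt flag, then throws SystemErrorException.
        watcher.interruptedException(ie);
        return null; // unreachable: interruptedException(ie) always throws
      }
    }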

http://git-wip-us.apache.org/repos/asf/hbase/blob/55719483/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
--
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
index 1099e5e..83e29e2 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooDefs.Ids;
 import org.apache.zookeeper.ZooDefs.Perms;
 import org.apache.zookeeper.data.ACL;
@@ -34,6 +35,7 @@ import org.apache.zookeeper.data.Id;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
 
 /**
  *
@@ -78,4 +80,25 @@ public class TestZKUtil {
     Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user2"))));

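The hunk is truncated just before the new test body. As a hedged sketch only (the test name and stubbing below are assumptions inferred from the added KeeperException and Mockito imports, not the committed test), it would look something like:

    // Hypothetical sketch: verify interruptedException() now rethrows.
    @Test(expected = KeeperException.SystemErrorException.class)
    public void testInterruptedExceptionIsRethrown() throws Exception {
      ZooKeeperWatcher watcher = Mockito.mock(ZooKeeperWatcher.class);
      Mockito.doCallRealMethod().when(watcher)
          .interruptedException(Mockito.any(InterruptedException.class));
      watcher.interruptedException(new InterruptedException("test"));
    }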
hbase git commit: HBASE-19205 Backport HBASE-18441 ZookeeperWatcher#interruptedException should throw exception

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 9f0a450e5 -> eb8560cab


HBASE-19205 Backport HBASE-18441 ZookeeperWatcher#interruptedException should throw exception


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eb8560ca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eb8560ca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eb8560ca

Branch: refs/heads/branch-1.2
Commit: eb8560cab55018236f502f25a77c69610f3c56da
Parents: 9f0a450
Author: stack 
Authored: Tue Nov 7 21:10:42 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 7 21:12:22 2017 -0800

--
 .../hbase/zookeeper/ZooKeeperWatcher.java   | 30 
 .../hadoop/hbase/zookeeper/TestZKUtil.java  | 23 +++
 .../hbase/zookeeper/ZooKeeperMainServer.java|  5 ++--
 3 files changed, 44 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eb8560ca/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index 73a3a9e..c50c230 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -249,7 +249,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
     } catch(KeeperException.NoNodeException nne) {
       return;
     } catch(InterruptedException ie) {
-      interruptedException(ie);
+      interruptedExceptionNoThrow(ie, false);
     } catch (IOException|KeeperException e) {
       LOG.warn("Received exception while checking and setting zookeeper ACLs", e);
     }
@@ -747,20 +747,26 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
 
   /**
    * Handles InterruptedExceptions in client calls.
-   * <p>
-   * This may be temporary but for now this gives one place to deal with these.
-   * <p>
-   * TODO: Currently, this method does nothing.
-   *   Is this ever expected to happen?  Do we abort or can we let it run?
-   *   Maybe this should be logged as WARN?  It shouldn't happen?
-   * <p>
-   * @param ie
+   * @param ie the InterruptedException instance thrown
+   * @throws KeeperException the exception to throw, transformed from the InterruptedException
+   */
+  public void interruptedException(InterruptedException ie) throws KeeperException {
+    interruptedExceptionNoThrow(ie, true);
+    // Throw a system error exception to let the upper level handle it
+    throw new KeeperException.SystemErrorException();
+  }
+
+  /**
+   * Log the InterruptedException and interrupt the current thread
+   * @param ie the InterruptedException to log
+   * @param throwLater whether we will throw the exception later
    */
-  public void interruptedException(InterruptedException ie) {
-    LOG.debug(prefix("Received InterruptedException, doing nothing here"), ie);
+  public void interruptedExceptionNoThrow(InterruptedException ie, boolean throwLater) {
+    LOG.debug(prefix("Received InterruptedException, will interrupt current thread"
+        + (throwLater ? " and rethrow a SystemErrorException" : "")),
+      ie);
     // At least preserve the interrupt.
     Thread.currentThread().interrupt();
-    // no-op
   }
 
   /**
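Same change as the branch-1.3 commit above, applied to branch-1.2. The split into two helpers gives call sites a choice, sketched here under stated assumptions (doZooKeeperWork and mustAbortOnInterrupt are hypothetical):

    // Illustrative decision sketch: which helper fits which call site.
    try {
      doZooKeeperWork();                      // hypothetical ZK operation
    } catch (InterruptedException ie) {
      if (mustAbortOnInterrupt) {             // hypothetical policy flag
        watcher.interruptedException(ie);     // log, re-interrupt, then throw
      } else {
        // Best-effort path, as in the ACL-check hunk above:
        // log and re-interrupt only; the caller keeps going.
        watcher.interruptedExceptionNoThrow(ie, false);
      }
    }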

http://git-wip-us.apache.org/repos/asf/hbase/blob/eb8560ca/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
--
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
index 1099e5e..83e29e2 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooDefs.Ids;
 import org.apache.zookeeper.ZooDefs.Perms;
 import org.apache.zookeeper.data.ACL;
@@ -34,6 +35,7 @@ import org.apache.zookeeper.data.Id;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
 
 /**
  *
@@ -78,4 +80,25 @@ public class TestZKUtil {
     Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user2"))));

[4/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d8023f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
index 67aab82..8ab087d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
@@ -25,7 +25,6 @@ import static org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus.I
 import static org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus.SUCCESS;
 
 import java.io.IOException;
-import java.io.InterruptedIOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -36,7 +35,6 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -50,19 +48,16 @@ import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.SplitLogCounters;
 import org.apache.hadoop.hbase.Stoppable;
-import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination;
 import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination.SplitLogManagerDetails;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 
 /**
  * Distributes the task of log splitting to the available region servers.
@@ -106,15 +101,6 @@ public class SplitLogManager {
 
   private long unassignedTimeout;
   private long lastTaskCreateTime = Long.MAX_VALUE;
-  private long checkRecoveringTimeThreshold = 15000; // 15 seconds
-  private final List<Pair<Set<ServerName>, Boolean>> failedRecoveringRegionDeletions = Collections
-      .synchronizedList(new ArrayList<Pair<Set<ServerName>, Boolean>>());
-
-  /**
-   * In distributedLogReplay mode, we need touch both splitlog and recovering-regions znodes in one
-   * operation. So the lock is used to guard such cases.
-   */
-  protected final ReentrantLock recoveringRegionLock = new ReentrantLock();
 
   @VisibleForTesting
   final ConcurrentMap<String, Task> tasks = new ConcurrentHashMap<>();
@@ -141,7 +127,6 @@ public class SplitLogManager {
       SplitLogManagerDetails details = new SplitLogManagerDetails(tasks, master, failedDeletions);
   coordination.setDetails(details);
   coordination.init();
-  // Determine recovery mode
 }
 this.unassignedTimeout =
         conf.getInt("hbase.splitlog.manager.unassigned.timeout", DEFAULT_UNASSIGNED_TIMEOUT);
@@ -252,7 +237,6 @@ public class SplitLogManager {
 long t = EnvironmentEdgeManager.currentTime();
 long totalSize = 0;
 TaskBatch batch = new TaskBatch();
-Boolean isMetaRecovery = (filter == null) ? null : false;
 for (FileStatus lf : logfiles) {
   // TODO If the log file is still being written to - which is most likely
   // the case for the last log file - then its length will show up here
@@ -266,13 +250,6 @@ public class SplitLogManager {
   }
 }
 waitForSplittingCompletion(batch, status);
-// remove recovering regions
-if (filter == MasterWalManager.META_FILTER /* reference comparison */) {
-  // we split meta regions and user regions separately therefore logfiles are either all for
-  // meta or user regions but won't for both( we could have mixed situations in tests)
-  isMetaRecovery = true;
-}
-removeRecoveringRegions(serverNames, isMetaRecovery);
 
 if (batch.done != batch.installed) {
   batch.isDead = true;
@@ -384,61 +361,6 @@ public class SplitLogManager {
   }
 
   /**
-   * It removes recovering regions under /hbase/recovering-regions/[encoded region name] so that the
-   * region server hosting the region can allow reads to the recovered region
-   * @param serverNames servers which are just recovered
-   * @param isMetaRecovery whether current recovery is for the meta region on {@code serverNames}
-   */
-  private void removeRecoveringRegions(final Set<ServerName> serverNames, Boolean isMetaRecovery) {
-if (!isLogReplaying()) {
-  // the function is only used in WALEdit dire

[1/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/branch-2 8095f9628 -> a6d8023ff


http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d8023f/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
index 398be48..e5b1c39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
@@ -77,7 +77,6 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit
 + "\\\"" + "\u0027&" + "\")";
 // setup configuration
 conf = HBaseConfiguration.create();
-conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false);
 conf.setInt("hfile.format.version", 3);
 conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
 conf.setInt("replication.source.size.capacity", 10240);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d8023f/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index 4992135..18bc02b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -64,7 +64,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
@@ -101,7 +100,7 @@ public abstract class TestVisibilityLabels {
   public static Configuration conf;
 
   private volatile boolean killedRS = false;
-  @Rule 
+  @Rule
   public final TestName TEST_NAME = new TestName();
   public static User SUPERUSER, USER1;
 
@@ -146,7 +145,7 @@ public abstract class TestVisibilityLabels {
   current.getRowLength(), row2, 0, row2.length));
 }
   }
-  
+
   @Test
   public void testSimpleVisibilityLabelsWithUniCodeCharacters() throws Exception {
 TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -398,13 +397,6 @@ public abstract class TestVisibilityLabels {
   } catch (InterruptedException e) {
   }
 }
-    Region labelsTableRegion = regionServer.getRegions(LABELS_TABLE_NAME).get(0);
-while (labelsTableRegion.isRecovering()) {
-  try {
-Thread.sleep(10);
-  } catch (InterruptedException e) {
-  }
-}
   }
 
   @Test
@@ -583,7 +575,7 @@ public abstract class TestVisibilityLabels {
   put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
   put.setCellVisibility(new CellVisibility(SECRET));
   table.checkAndPut(row2, fam, qual, null, put);
-  
+
   Scan scan = new Scan();
   scan.setAuthorizations(new Authorizations(SECRET));
   ResultScanner scanner = table.getScanner(scan);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d8023f/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index 99525e2..513c765 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -129,7 +129,6 @@ public class TestVisibilityLabelsReplication {
   public void setup() throws Exception {
 // setup configuration
 conf = HBaseConfiguration.create();
-conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false);
 conf.setInt("hfile.format.version", 3);
 conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
 conf.setInt("replication.source.size.capacity", 10240);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a

[5/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a6d8023f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a6d8023f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a6d8023f

Branch: refs/heads/branch-2
Commit: a6d8023ff5ea967caa5eafb76b28640a892c416e
Parents: 8095f96
Author: Apekshit Sharma 
Authored: Fri Nov 3 14:34:15 2017 -0700
Committer: Apekshit Sharma 
Committed: Tue Nov 7 17:48:52 2017 -0800

--
 .../exceptions/RegionInRecoveryException.java   |  44 -
 .../apache/hadoop/hbase/executor/EventType.java |   3 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |   2 +-
 .../hbase/shaded/protobuf/RequestConverter.java |  16 +-
 .../apache/hadoop/hbase/zookeeper/ZKUtil.java   |  54 -
 .../hadoop/hbase/zookeeper/ZNodePaths.java  |  11 +-
 .../hbase/zookeeper/ZooKeeperWatcher.java   |   1 -
 .../hbase/zookeeper/TestZooKeeperWatcher.java   |   4 +-
 .../org/apache/hadoop/hbase/HConstants.java |   7 -
 .../src/main/resources/hbase-default.xml|   8 +-
 .../wal/MetricsEditsReplaySource.java   |  72 --
 .../wal/MetricsEditsReplaySourceImpl.java   |  76 --
 ...se.regionserver.wal.MetricsEditsReplaySource |  18 -
 .../src/main/protobuf/Admin.proto   |   4 +-
 .../src/main/protobuf/MasterProcedure.proto |   4 +-
 .../src/main/protobuf/ZooKeeper.proto   |   7 +-
 hbase-protocol/src/main/protobuf/Admin.proto|   4 +-
 .../src/main/protobuf/ZooKeeper.proto   |   7 +-
 .../org/apache/hadoop/hbase/SplitLogTask.java   |  37 +-
 .../SplitLogManagerCoordination.java|  69 --
 .../SplitLogWorkerCoordination.java |  12 +-
 .../ZKSplitLogManagerCoordination.java  | 394 +---
 .../ZkSplitLogWorkerCoordination.java   |  80 +-
 .../hbase/coprocessor/RegionObserver.java   |   7 +-
 .../hadoop/hbase/master/MasterWalManager.java   |  29 -
 .../hadoop/hbase/master/SplitLogManager.java| 147 +--
 .../master/procedure/RSProcedureDispatcher.java |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 119 +--
 .../hbase/regionserver/HRegionServer.java   | 112 ---
 .../hbase/regionserver/RSRpcServices.java   |  24 -
 .../hadoop/hbase/regionserver/Region.java   |   3 -
 .../regionserver/RegionCoprocessorHost.java |  19 -
 .../regionserver/RegionServerServices.java  |   8 +-
 .../hbase/regionserver/SplitLogWorker.java  |   8 +-
 .../handler/FinishRegionRecoveringHandler.java  |  56 --
 .../handler/WALSplitterHandler.java |  18 +-
 .../regionserver/wal/MetricsWALEditsReplay.java |  59 --
 .../regionserver/wal/WALEditsReplaySink.java| 227 -
 .../RegionReplicaReplicationEndpoint.java   |  41 -
 .../hbase/security/access/AccessController.java |  22 +-
 .../visibility/VisibilityController.java|  16 +-
 .../apache/hadoop/hbase/wal/WALSplitter.java| 705 +
 .../zookeeper/RecoveringRegionWatcher.java  |  92 --
 .../hadoop/hbase/zookeeper/ZKSplitLog.java  | 114 +--
 .../hadoop/hbase/MockRegionServerServices.java  |   6 -
 .../org/apache/hadoop/hbase/TestIOFencing.java  |   2 +-
 .../apache/hadoop/hbase/TestSerialization.java  |  13 -
 .../hadoop/hbase/client/TestReplicasClient.java |   2 +-
 .../hadoop/hbase/master/MockRegionServer.java   |   5 -
 .../master/TestDistributedLogSplitting.java | 996 +--
 .../hbase/master/TestMasterWalManager.java  | 105 --
 .../hbase/master/TestSplitLogManager.java   |  83 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  90 --
 .../regionserver/TestPerColumnFamilyFlush.java  |  12 -
 .../regionserver/TestRegionReplicaFailover.java |  20 -
 .../regionserver/TestRegionServerNoMaster.java  |   4 +-
 .../hbase/regionserver/TestSplitLogWorker.java  |  27 +-
 .../regionserver/TestSplitWalDataLoss.java  |   1 -
 .../regionserver/wal/AbstractTestWALReplay.java |  14 +-
 ...egionReplicaReplicationEndpointNoMaster.java |  52 -
 ...sibilityLabelReplicationWithExpAsString.java |   1 -
 .../visibility/TestVisibilityLabels.java|  14 +-
 .../TestVisibilityLabelsReplication.java|   1 -
 ...VisibilityLabelsWithCustomVisLabService.java |   2 -
 ...ibilityLabelsWithDefaultVisLabelService.java |   2 -
 .../TestVisibilityLabelsWithDeletes.java|   3 +-
 .../TestVisibilityWithCheckAuths.java   |   1 -
 .../hbase/wal/TestWALReaderOnSecureWAL.java |   9 +-
 .../apache/hadoop/hbase/wal/TestWALSplit.java   |  15 +-
 src/main/asciidoc/_chapters/architecture.adoc   |  33 +-
 src/main/asciidoc/_chapters/hbase-default.adoc  |  22 +-
 src/main/asciidoc/_chapters/upgrading.adoc  |  10 +-
 72 files changed, 182 insertions(+), 4125 deletions(-)
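Since the feature's configuration is purged too, sites upgrading past this commit should drop the now-dead switch from hbase-site.xml. A hedged sketch of a pre-upgrade check (key string per the removed HConstants.DISTRIBUTED_LOG_REPLAY_KEY):

    // Illustrative check: warn if a site config still sets the removed flag.
    Configuration conf = HBaseConfiguration.create();
    if (conf.get("hbase.master.distributed.log.replay") != null) {
      System.err.println("hbase.master.distributed.log.replay is set, but "
          + "Distributed Log Replay was removed by HBASE-19128; delete it.");
    }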
--

[3/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d8023f/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 215d2ed..39063a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -18,9 +18,7 @@
  */
 package org.apache.hadoop.hbase.wal;
 
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
-import org.apache.hadoop.hbase.regionserver.SplitLogWorker;
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -32,7 +30,6 @@ import java.io.InterruptedIOException;
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -50,7 +47,6 @@ import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Matcher;
@@ -67,26 +63,13 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.client.ClusterConnection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.ConnectionUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.TableState;
-import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination;
-import org.apache.hadoop.hbase.exceptions.RegionOpeningException;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.master.SplitLogManager;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
@@ -95,24 +78,15 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.LastSequenceId;
 import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
 import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
-import org.apache.hadoop.hbase.regionserver.wal.WALEditsReplaySink;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CancelableProgressable;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.Threads;
@@ -122,11 +96,12 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
 i

[2/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d8023f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index eafc412..3d59639 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -33,12 +33,9 @@ import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashSet;
 import java.util.Iterator;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.NavigableSet;
-import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -67,28 +64,18 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.SplitLogCounters;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
-import org.apache.hadoop.hbase.client.ClusterConnection;
-import org.apache.hadoop.hbase.client.CompactionState;
-import org.apache.hadoop.hbase.client.ConnectionUtils;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.NonceGenerator;
-import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination;
-import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
 import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
 import org.apache.hadoop.hbase.master.SplitLogManager.TaskBatch;
 import org.apache.hadoop.hbase.master.assignment.RegionStates;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -110,10 +97,8 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -145,7 +130,6 @@ public class TestDistributedLogSplitting {
   Configuration conf;
   static Configuration originalConf;
   static HBaseTestingUtility TEST_UTIL;
-  static MiniDFSCluster dfsCluster;
   static MiniZooKeeperCluster zkCluster;
 
   @Rule
@@ -154,7 +138,6 @@ public class TestDistributedLogSplitting {
   @BeforeClass
   public static void setup() throws Exception {
 TEST_UTIL = new HBaseTestingUtility(HBaseConfiguration.create());
-dfsCluster = TEST_UTIL.startMiniDFSCluster(1);
 zkCluster = TEST_UTIL.startMiniZKCluster();
 originalConf = TEST_UTIL.getConfiguration();
   }
@@ -178,7 +161,6 @@ public class TestDistributedLogSplitting {
 conf.setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
 TEST_UTIL.shutdownMiniHBaseCluster();
 TEST_UTIL = new HBaseTestingUtility(conf);
-TEST_UTIL.setDFSCluster(dfsCluster);
 TEST_UTIL.setZkCluster(zkCluster);
 TEST_UTIL.startMiniHBaseCluster(NUM_MASTERS, num_rs);
 cluster = TEST_UTIL.getHBaseCluster();
@@ -211,14 +193,12 @@ public class TestDistributedLogSplitting {
 }
   }
 
-  @Ignore("DLR is broken by HBASE-12751") @Test (timeout=30)
+  @Test (timeout=30)
   public void testRecoveredEdits() throws Exception {
-    LOG.info("testRecoveredEdits");
     conf.setLong("hbase.regionserver.hlog.blocksize", 30 * 1024); // create more than one wal
-    conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false);
 startCluster(NUM_RS);
 
-final int NUM_LOG_LINES = 1000;
+final int NUM_LOG_LINES = 1;
     final SplitLogManager slm = master.getMasterWalManager().getSplitLogManager();
 // turn off load balancing to prevent regions from moving around otherwise
 // they will consume recovered.edits
@@ -229,23 +209,18 @@ public class TestDistributed

[3/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/4132314f/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 215d2ed..39063a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -18,9 +18,7 @@
  */
 package org.apache.hadoop.hbase.wal;
 
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
-import org.apache.hadoop.hbase.regionserver.SplitLogWorker;
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -32,7 +30,6 @@ import java.io.InterruptedIOException;
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -50,7 +47,6 @@ import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Matcher;
@@ -67,26 +63,13 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.client.ClusterConnection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.ConnectionUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.TableState;
-import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination;
-import org.apache.hadoop.hbase.exceptions.RegionOpeningException;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.master.SplitLogManager;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
@@ -95,24 +78,15 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.LastSequenceId;
 import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
 import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
-import org.apache.hadoop.hbase.regionserver.wal.WALEditsReplaySink;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CancelableProgressable;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.Threads;
@@ -122,11 +96,12 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
 i

[4/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/4132314f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
index 67aab82..8ab087d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
@@ -25,7 +25,6 @@ import static org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus.I
 import static org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus.SUCCESS;
 
 import java.io.IOException;
-import java.io.InterruptedIOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -36,7 +35,6 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -50,19 +48,16 @@ import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.SplitLogCounters;
 import org.apache.hadoop.hbase.Stoppable;
-import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination;
 import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination.SplitLogManagerDetails;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 
 /**
  * Distributes the task of log splitting to the available region servers.
@@ -106,15 +101,6 @@ public class SplitLogManager {
 
   private long unassignedTimeout;
   private long lastTaskCreateTime = Long.MAX_VALUE;
-  private long checkRecoveringTimeThreshold = 15000; // 15 seconds
-  private final List<Pair<Set<ServerName>, Boolean>> failedRecoveringRegionDeletions = Collections
-      .synchronizedList(new ArrayList<Pair<Set<ServerName>, Boolean>>());
-
-  /**
-   * In distributedLogReplay mode, we need touch both splitlog and recovering-regions znodes in one
-   * operation. So the lock is used to guard such cases.
-   */
-  protected final ReentrantLock recoveringRegionLock = new ReentrantLock();
 
   @VisibleForTesting
   final ConcurrentMap<String, Task> tasks = new ConcurrentHashMap<>();
@@ -141,7 +127,6 @@ public class SplitLogManager {
       SplitLogManagerDetails details = new SplitLogManagerDetails(tasks, master, failedDeletions);
   coordination.setDetails(details);
   coordination.init();
-  // Determine recovery mode
 }
 this.unassignedTimeout =
         conf.getInt("hbase.splitlog.manager.unassigned.timeout", DEFAULT_UNASSIGNED_TIMEOUT);
@@ -252,7 +237,6 @@ public class SplitLogManager {
 long t = EnvironmentEdgeManager.currentTime();
 long totalSize = 0;
 TaskBatch batch = new TaskBatch();
-Boolean isMetaRecovery = (filter == null) ? null : false;
 for (FileStatus lf : logfiles) {
   // TODO If the log file is still being written to - which is most likely
   // the case for the last log file - then its length will show up here
@@ -266,13 +250,6 @@ public class SplitLogManager {
   }
 }
 waitForSplittingCompletion(batch, status);
-// remove recovering regions
-if (filter == MasterWalManager.META_FILTER /* reference comparison */) {
-  // we split meta regions and user regions separately therefore logfiles are either all for
-  // meta or user regions but won't for both( we could have mixed situations in tests)
-  isMetaRecovery = true;
-}
-removeRecoveringRegions(serverNames, isMetaRecovery);
 
 if (batch.done != batch.installed) {
   batch.isDead = true;
@@ -384,61 +361,6 @@ public class SplitLogManager {
   }
 
   /**
-   * It removes recovering regions under /hbase/recovering-regions/[encoded region name] so that the
-   * region server hosting the region can allow reads to the recovered region
-   * @param serverNames servers which are just recovered
-   * @param isMetaRecovery whether current recovery is for the meta region on {@code serverNames}
-   */
-  private void removeRecoveringRegions(final Set<ServerName> serverNames, Boolean isMetaRecovery) {
-if (!isLogReplaying()) {
-  // the function is only used in WALEdit dire

[2/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/4132314f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index eafc412..3d59639 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -33,12 +33,9 @@ import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashSet;
 import java.util.Iterator;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.NavigableSet;
-import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -67,28 +64,18 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.SplitLogCounters;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
-import org.apache.hadoop.hbase.client.ClusterConnection;
-import org.apache.hadoop.hbase.client.CompactionState;
-import org.apache.hadoop.hbase.client.ConnectionUtils;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.NonceGenerator;
-import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination;
-import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
 import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
 import org.apache.hadoop.hbase.master.SplitLogManager.TaskBatch;
 import org.apache.hadoop.hbase.master.assignment.RegionStates;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -110,10 +97,8 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -145,7 +130,6 @@ public class TestDistributedLogSplitting {
   Configuration conf;
   static Configuration originalConf;
   static HBaseTestingUtility TEST_UTIL;
-  static MiniDFSCluster dfsCluster;
   static MiniZooKeeperCluster zkCluster;
 
   @Rule
@@ -154,7 +138,6 @@ public class TestDistributedLogSplitting {
   @BeforeClass
   public static void setup() throws Exception {
 TEST_UTIL = new HBaseTestingUtility(HBaseConfiguration.create());
-dfsCluster = TEST_UTIL.startMiniDFSCluster(1);
 zkCluster = TEST_UTIL.startMiniZKCluster();
 originalConf = TEST_UTIL.getConfiguration();
   }
@@ -178,7 +161,6 @@ public class TestDistributedLogSplitting {
 conf.setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
 TEST_UTIL.shutdownMiniHBaseCluster();
 TEST_UTIL = new HBaseTestingUtility(conf);
-TEST_UTIL.setDFSCluster(dfsCluster);
 TEST_UTIL.setZkCluster(zkCluster);
 TEST_UTIL.startMiniHBaseCluster(NUM_MASTERS, num_rs);
 cluster = TEST_UTIL.getHBaseCluster();
@@ -211,14 +193,12 @@ public class TestDistributedLogSplitting {
 }
   }
 
-  @Ignore("DLR is broken by HBASE-12751") @Test (timeout=30)
+  @Test (timeout=30)
   public void testRecoveredEdits() throws Exception {
-    LOG.info("testRecoveredEdits");
     conf.setLong("hbase.regionserver.hlog.blocksize", 30 * 1024); // create more than one wal
-    conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false);
 startCluster(NUM_RS);
 
-final int NUM_LOG_LINES = 1000;
+final int NUM_LOG_LINES = 1;
     final SplitLogManager slm = master.getMasterWalManager().getSplitLogManager();
 // turn off load balancing to prevent regions from moving around otherwise
 // they will consume recovered.edits
@@ -229,23 +209,18 @@ public class TestDistributed

[1/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/master dc127df07 -> 4132314f5


http://git-wip-us.apache.org/repos/asf/hbase/blob/4132314f/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
index 398be48..e5b1c39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
@@ -77,7 +77,6 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit
 + "\\\"" + "\u0027&" + "\")";
 // setup configuration
 conf = HBaseConfiguration.create();
-conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false);
 conf.setInt("hfile.format.version", 3);
 conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
 conf.setInt("replication.source.size.capacity", 10240);

http://git-wip-us.apache.org/repos/asf/hbase/blob/4132314f/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index 4992135..18bc02b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -64,7 +64,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
@@ -101,7 +100,7 @@ public abstract class TestVisibilityLabels {
   public static Configuration conf;
 
   private volatile boolean killedRS = false;
-  @Rule 
+  @Rule
   public final TestName TEST_NAME = new TestName();
   public static User SUPERUSER, USER1;
 
@@ -146,7 +145,7 @@ public abstract class TestVisibilityLabels {
   current.getRowLength(), row2, 0, row2.length));
 }
   }
-  
+
   @Test
   public void testSimpleVisibilityLabelsWithUniCodeCharacters() throws Exception {
 TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -398,13 +397,6 @@ public abstract class TestVisibilityLabels {
   } catch (InterruptedException e) {
   }
 }
-    Region labelsTableRegion = regionServer.getRegions(LABELS_TABLE_NAME).get(0);
-while (labelsTableRegion.isRecovering()) {
-  try {
-Thread.sleep(10);
-  } catch (InterruptedException e) {
-  }
-}
   }
 
   @Test
@@ -583,7 +575,7 @@ public abstract class TestVisibilityLabels {
   put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
   put.setCellVisibility(new CellVisibility(SECRET));
   table.checkAndPut(row2, fam, qual, null, put);
-  
+
   Scan scan = new Scan();
   scan.setAuthorizations(new Authorizations(SECRET));
   ResultScanner scanner = table.getScanner(scan);

http://git-wip-us.apache.org/repos/asf/hbase/blob/4132314f/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index 99525e2..513c765 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -129,7 +129,6 @@ public class TestVisibilityLabelsReplication {
   public void setup() throws Exception {
 // setup configuration
 conf = HBaseConfiguration.create();
-conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false);
 conf.setInt("hfile.format.version", 3);
 conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
 conf.setInt("replication.source.size.capacity", 10240);

http://git-wip-us.apache.org/repos/asf/hbase/blob/413

[5/5] hbase git commit: HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.

2017-11-07 Thread appy
HBASE-19128 Purge Distributed Log Replay from codebase, configurations, text; mark the feature as unsupported, broken.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4132314f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4132314f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4132314f

Branch: refs/heads/master
Commit: 4132314f51951af43f4f56d9886233b3ba417903
Parents: dc127df
Author: Apekshit Sharma 
Authored: Fri Nov 3 14:34:15 2017 -0700
Committer: Apekshit Sharma 
Committed: Tue Nov 7 17:43:14 2017 -0800

--
 .../exceptions/RegionInRecoveryException.java   |  44 -
 .../apache/hadoop/hbase/executor/EventType.java |   3 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |   2 +-
 .../hbase/shaded/protobuf/RequestConverter.java |  16 +-
 .../apache/hadoop/hbase/zookeeper/ZKUtil.java   |  54 -
 .../hadoop/hbase/zookeeper/ZNodePaths.java  |  11 +-
 .../hbase/zookeeper/ZooKeeperWatcher.java   |   1 -
 .../hbase/zookeeper/TestZooKeeperWatcher.java   |   4 +-
 .../org/apache/hadoop/hbase/HConstants.java |   7 -
 .../src/main/resources/hbase-default.xml|   8 +-
 .../wal/MetricsEditsReplaySource.java   |  72 --
 .../wal/MetricsEditsReplaySourceImpl.java   |  76 --
 ...se.regionserver.wal.MetricsEditsReplaySource |  18 -
 .../src/main/protobuf/Admin.proto   |   4 +-
 .../src/main/protobuf/MasterProcedure.proto |   4 +-
 .../src/main/protobuf/ZooKeeper.proto   |   7 +-
 hbase-protocol/src/main/protobuf/Admin.proto|   4 +-
 .../src/main/protobuf/ZooKeeper.proto   |   7 +-
 .../org/apache/hadoop/hbase/SplitLogTask.java   |  37 +-
 .../SplitLogManagerCoordination.java|  69 --
 .../SplitLogWorkerCoordination.java |  12 +-
 .../ZKSplitLogManagerCoordination.java  | 394 +---
 .../ZkSplitLogWorkerCoordination.java   |  80 +-
 .../hbase/coprocessor/RegionObserver.java   |   7 +-
 .../hadoop/hbase/master/MasterWalManager.java   |  29 -
 .../hadoop/hbase/master/SplitLogManager.java| 147 +--
 .../master/procedure/RSProcedureDispatcher.java |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 119 +--
 .../hbase/regionserver/HRegionServer.java   | 112 ---
 .../hbase/regionserver/RSRpcServices.java   |  24 -
 .../hadoop/hbase/regionserver/Region.java   |   3 -
 .../regionserver/RegionCoprocessorHost.java |  19 -
 .../regionserver/RegionServerServices.java  |   8 +-
 .../hbase/regionserver/SplitLogWorker.java  |   8 +-
 .../handler/FinishRegionRecoveringHandler.java  |  56 --
 .../handler/WALSplitterHandler.java |  18 +-
 .../regionserver/wal/MetricsWALEditsReplay.java |  59 --
 .../regionserver/wal/WALEditsReplaySink.java| 227 -
 .../RegionReplicaReplicationEndpoint.java   |  41 -
 .../hbase/security/access/AccessController.java |  22 +-
 .../visibility/VisibilityController.java|  16 +-
 .../apache/hadoop/hbase/wal/WALSplitter.java| 705 +
 .../zookeeper/RecoveringRegionWatcher.java  |  92 --
 .../hadoop/hbase/zookeeper/ZKSplitLog.java  | 114 +--
 .../hadoop/hbase/MockRegionServerServices.java  |   6 -
 .../org/apache/hadoop/hbase/TestIOFencing.java  |   2 +-
 .../apache/hadoop/hbase/TestSerialization.java  |  13 -
 .../hadoop/hbase/client/TestReplicasClient.java |   2 +-
 .../hadoop/hbase/master/MockRegionServer.java   |   5 -
 .../master/TestDistributedLogSplitting.java | 996 +--
 .../hbase/master/TestMasterWalManager.java  | 105 --
 .../hbase/master/TestSplitLogManager.java   |  83 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  90 --
 .../regionserver/TestPerColumnFamilyFlush.java  |  12 -
 .../regionserver/TestRegionReplicaFailover.java |  20 -
 .../regionserver/TestRegionServerNoMaster.java  |   4 +-
 .../hbase/regionserver/TestSplitLogWorker.java  |  27 +-
 .../regionserver/TestSplitWalDataLoss.java  |   1 -
 .../regionserver/wal/AbstractTestWALReplay.java |  14 +-
 ...egionReplicaReplicationEndpointNoMaster.java |  52 -
 ...sibilityLabelReplicationWithExpAsString.java |   1 -
 .../visibility/TestVisibilityLabels.java|  14 +-
 .../TestVisibilityLabelsReplication.java|   1 -
 ...VisibilityLabelsWithCustomVisLabService.java |   2 -
 ...ibilityLabelsWithDefaultVisLabelService.java |   2 -
 .../TestVisibilityLabelsWithDeletes.java|   3 +-
 .../TestVisibilityWithCheckAuths.java   |   1 -
 .../hbase/wal/TestWALReaderOnSecureWAL.java |   9 +-
 .../apache/hadoop/hbase/wal/TestWALSplit.java   |  15 +-
 src/main/asciidoc/_chapters/architecture.adoc   |  27 +-
 src/main/asciidoc/_chapters/hbase-default.adoc  |  20 +-
 src/main/asciidoc/_chapters/upgrading.adoc  |  10 +-
 72 files changed, 170 insertions(+), 4129 deletions(-)

hbase git commit: HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

2017-11-07 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 0a6e93801 -> 7c8d76994


HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7c8d7699
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7c8d7699
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7c8d7699

Branch: refs/heads/branch-1.4
Commit: 7c8d76994a29add97c77b240c9b61dbb568f2828
Parents: 0a6e938
Author: Guangxu Cheng 
Authored: Mon Oct 30 16:29:40 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 17:14:17 2017 -0800

--
 .../hbase/rsgroup/RSGroupAdminServer.java   |  7 
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  | 34 
 2 files changed, 41 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7c8d7699/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 13b3141..125e08e 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.ServerManager;
 import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
 import org.apache.hadoop.hbase.net.Address;
+import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 
 /**
  * Service to support Region Server Grouping (HBase-6721)
@@ -276,6 +277,12 @@ public class RSGroupAdminServer implements RSGroupAdmin {
   }
 }
 for(TableName table: tables) {
+  if (master.getAssignmentManager().getTableStateManager().isTableState(table,
+      ZooKeeperProtos.Table.State.DISABLED,
+      ZooKeeperProtos.Table.State.DISABLING)) {
+    LOG.debug("Skipping move regions because the table " + table + " is disabled.");
+    continue;
+  }
   TableLock lock = master.getTableLockManager().writeLock(table, "Group: table move");
   try {
     lock.acquire();

http://git-wip-us.apache.org/repos/asf/hbase/blob/7c8d7699/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index 0db0fea..a0d1401 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -812,4 +812,38 @@ public abstract class TestRSGroupsBase {
 Set<TableName> newGroupTables = rsGroupAdmin.getRSGroupInfo(newGroup.getName()).getTables();
 assertTrue(newGroupTables.contains(tableName));
   }
+
+  @Test
+  public void testDisabledTableMove() throws Exception {
+final TableName tableName = TableName.valueOf(tablePrefix + "_testDisabledTableMove");
+final byte[] familyNameBytes = Bytes.toBytes("f");
+String newGroupName = getGroupName("testDisabledTableMove");
+final RSGroupInfo newGroup = addGroup(rsGroupAdmin, newGroupName, 2);
+
+TEST_UTIL.createMultiRegionTable(tableName, familyNameBytes, 5);
+TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {
+  @Override
+  public boolean evaluate() throws Exception {
+    List<String> regions = getTableRegionMap().get(tableName);
+if (regions == null) {
+  return false;
+}
+return getTableRegionMap().get(tableName).size() >= 5;
+  }
+});
+
+RSGroupInfo tableGrp = rsGroupAdmin.getRSGroupInfoOfTable(tableName);
+assertTrue(tableGrp.getName().equals(RSGroupInfo.DEFAULT_GROUP));
+
+//test disable table
+admin.disableTable(tableName);
+
+//change table's group
+LOG.info("Moving table "+ tableName + " to " + newGroup.getName());
+rsGroupAdmin.moveTables(Sets.newHashSet(tableName), newGroup.getName());
+
+//verify group change
+Assert.assertEquals(newGroup.getName(),
+rsGroupAdmin.getRSGroupInfoOfTable(tableName).getName());
+  }
 }
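
For readers following along, a minimal caller-side sketch shows the behavior this change enables. It assumes a connected Admin and RSGroupAdmin handle; the table and group names are illustrative, not taken from the patch:

import com.google.common.collect.Sets;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.rsgroup.RSGroupAdmin;

public class MoveDisabledTableSketch {
  // Moves a disabled table between rsgroups without triggering region moves.
  static void moveDisabledTable(Admin admin, RSGroupAdmin rsGroupAdmin) throws Exception {
    TableName table = TableName.valueOf("demo_table"); // hypothetical table
    admin.disableTable(table);
    // Before this fix moveTables() threw while relocating the offline regions;
    // with the fix the group assignment changes and the region moves are skipped.
    rsGroupAdmin.moveTables(Sets.newHashSet(table), "demo_group"); // hypothetical group
    assert "demo_group".equals(rsGroupAdmin.getRSGroupInfoOfTable(table).getName());
  }
}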



hbase git commit: HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

2017-11-07 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 264cf0d47 -> dc1310f48


HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dc1310f4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dc1310f4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dc1310f4

Branch: refs/heads/branch-1
Commit: dc1310f48731c6855d7df7a93dea773e4d46e579
Parents: 264cf0d
Author: Guangxu Cheng 
Authored: Mon Oct 30 16:29:40 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 17:14:03 2017 -0800

--
 .../hbase/rsgroup/RSGroupAdminServer.java   |  7 
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  | 34 
 2 files changed, 41 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dc1310f4/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 13b3141..125e08e 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.ServerManager;
 import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
 import org.apache.hadoop.hbase.net.Address;
+import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 
 /**
  * Service to support Region Server Grouping (HBase-6721)
@@ -276,6 +277,12 @@ public class RSGroupAdminServer implements RSGroupAdmin {
   }
 }
 for(TableName table: tables) {
+  if (master.getAssignmentManager().getTableStateManager().isTableState(table,
+      ZooKeeperProtos.Table.State.DISABLED,
+      ZooKeeperProtos.Table.State.DISABLING)) {
+    LOG.debug("Skipping move regions because the table " + table + " is disabled.");
+    continue;
+  }
   TableLock lock = master.getTableLockManager().writeLock(table, "Group: table move");
   try {
     lock.acquire();

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc1310f4/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index 0db0fea..a0d1401 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -812,4 +812,38 @@ public abstract class TestRSGroupsBase {
 Set<TableName> newGroupTables = rsGroupAdmin.getRSGroupInfo(newGroup.getName()).getTables();
 assertTrue(newGroupTables.contains(tableName));
   }
+
+  @Test
+  public void testDisabledTableMove() throws Exception {
+final TableName tableName = TableName.valueOf(tablePrefix + "_testDisabledTableMove");
+final byte[] familyNameBytes = Bytes.toBytes("f");
+String newGroupName = getGroupName("testDisabledTableMove");
+final RSGroupInfo newGroup = addGroup(rsGroupAdmin, newGroupName, 2);
+
+TEST_UTIL.createMultiRegionTable(tableName, familyNameBytes, 5);
+TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {
+  @Override
+  public boolean evaluate() throws Exception {
+    List<String> regions = getTableRegionMap().get(tableName);
+if (regions == null) {
+  return false;
+}
+return getTableRegionMap().get(tableName).size() >= 5;
+  }
+});
+
+RSGroupInfo tableGrp = rsGroupAdmin.getRSGroupInfoOfTable(tableName);
+assertTrue(tableGrp.getName().equals(RSGroupInfo.DEFAULT_GROUP));
+
+//test disable table
+admin.disableTable(tableName);
+
+//change table's group
+LOG.info("Moving table "+ tableName + " to " + newGroup.getName());
+rsGroupAdmin.moveTables(Sets.newHashSet(tableName), newGroup.getName());
+
+//verify group change
+Assert.assertEquals(newGroup.getName(),
+rsGroupAdmin.getRSGroupInfoOfTable(tableName).getName());
+  }
 }



hbase git commit: HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

2017-11-07 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e3bd47531 -> 8095f9628


HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8095f962
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8095f962
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8095f962

Branch: refs/heads/branch-2
Commit: 8095f96289ac2fb1f5d9cd15b3ef55ddf791020f
Parents: e3bd475
Author: Guangxu Cheng 
Authored: Mon Oct 30 16:57:10 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 17:12:55 2017 -0800

--
 .../hbase/rsgroup/RSGroupAdminServer.java   |  4 +++
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  | 33 
 2 files changed, 37 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8095f962/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 3c82d76..4a9a885 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -406,6 +406,10 @@ public class RSGroupAdminServer implements RSGroupAdmin {
   // action is required.
   if (targetGroup != null) {
 for (TableName table: tables) {
+  if (master.getAssignmentManager().isTableDisabled(table)) {
+    LOG.debug("Skipping move regions because the table " + table + " is disabled.");
+    continue;
+  }
   for (RegionInfo region :
       master.getAssignmentManager().getRegionStates().getRegionsOfTable(table)) {
 LOG.info("Moving region " + region.getShortNameToLog() +

http://git-wip-us.apache.org/repos/asf/hbase/blob/8095f962/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index f0291fa..b225dad 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -709,6 +709,39 @@ public abstract class TestRSGroupsBase {
   }
 
   @Test
+  public void testDisabledTableMove() throws Exception {
+final byte[] familyNameBytes = Bytes.toBytes("f");
+String newGroupName = getGroupName(name.getMethodName());
+final RSGroupInfo newGroup = addGroup(newGroupName, 2);
+
+TEST_UTIL.createMultiRegionTable(tableName, familyNameBytes, 5);
+TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {
+  @Override
+  public boolean evaluate() throws Exception {
+    List<String> regions = getTableRegionMap().get(tableName);
+if (regions == null) {
+  return false;
+}
+return getTableRegionMap().get(tableName).size() >= 5;
+  }
+});
+
+RSGroupInfo tableGrp = rsGroupAdmin.getRSGroupInfoOfTable(tableName);
+assertTrue(tableGrp.getName().equals(RSGroupInfo.DEFAULT_GROUP));
+
+//test disable table
+admin.disableTable(tableName);
+
+//change table's group
+LOG.info("Moving table "+ tableName + " to " + newGroup.getName());
+rsGroupAdmin.moveTables(Sets.newHashSet(tableName), newGroup.getName());
+
+//verify group change
+Assert.assertEquals(newGroup.getName(),
+rsGroupAdmin.getRSGroupInfoOfTable(tableName).getName());
+  }
+
+  @Test
   public void testMoveServersAndTables() throws Exception {
 LOG.info("testMoveServersAndTables");
 final RSGroupInfo newGroup = addGroup(getGroupName(name.getMethodName()), 1);



hbase git commit: HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

2017-11-07 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/master 46408e021 -> dc127df07


HBASE-19088 move_tables_rsgroup will throw an exception when the table is disabled

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dc127df0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dc127df0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dc127df0

Branch: refs/heads/master
Commit: dc127df077490640a808d540fa56abbe888c9730
Parents: 46408e0
Author: Guangxu Cheng 
Authored: Mon Oct 30 16:57:10 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 17:13:17 2017 -0800

--
 .../hbase/rsgroup/RSGroupAdminServer.java   |  4 +++
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  | 33 
 2 files changed, 37 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dc127df0/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 3c82d76..4a9a885 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -406,6 +406,10 @@ public class RSGroupAdminServer implements RSGroupAdmin {
   // action is required.
   if (targetGroup != null) {
 for (TableName table: tables) {
+  if (master.getAssignmentManager().isTableDisabled(table)) {
+    LOG.debug("Skipping move regions because the table " + table + " is disabled.");
+    continue;
+  }
   for (RegionInfo region :
       master.getAssignmentManager().getRegionStates().getRegionsOfTable(table)) {
 LOG.info("Moving region " + region.getShortNameToLog() +

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc127df0/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index f0291fa..b225dad 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -709,6 +709,39 @@ public abstract class TestRSGroupsBase {
   }
 
   @Test
+  public void testDisabledTableMove() throws Exception {
+final byte[] familyNameBytes = Bytes.toBytes("f");
+String newGroupName = getGroupName(name.getMethodName());
+final RSGroupInfo newGroup = addGroup(newGroupName, 2);
+
+TEST_UTIL.createMultiRegionTable(tableName, familyNameBytes, 5);
+TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {
+  @Override
+  public boolean evaluate() throws Exception {
+    List<String> regions = getTableRegionMap().get(tableName);
+if (regions == null) {
+  return false;
+}
+return getTableRegionMap().get(tableName).size() >= 5;
+  }
+});
+
+RSGroupInfo tableGrp = rsGroupAdmin.getRSGroupInfoOfTable(tableName);
+assertTrue(tableGrp.getName().equals(RSGroupInfo.DEFAULT_GROUP));
+
+//test disable table
+admin.disableTable(tableName);
+
+//change table's group
+LOG.info("Moving table "+ tableName + " to " + newGroup.getName());
+rsGroupAdmin.moveTables(Sets.newHashSet(tableName), newGroup.getName());
+
+//verify group change
+Assert.assertEquals(newGroup.getName(),
+rsGroupAdmin.getRSGroupInfoOfTable(tableName).getName());
+  }
+
+  @Test
   public void testMoveServersAndTables() throws Exception {
 LOG.info("testMoveServersAndTables");
 final RSGroupInfo newGroup = addGroup(getGroupName(name.getMethodName()), 1);



[2/8] hbase git commit: HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)

2017-11-07 Thread apurtell
HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7953daa5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7953daa5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7953daa5

Branch: refs/heads/branch-1.4
Commit: 7953daa52c8e98803d4f0b73096c381e3de315f2
Parents: 546a236
Author: Andrew Purtell 
Authored: Tue Nov 7 11:37:02 2017 -0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 12:36:27 2017 -0800

--
 dev-support/hbase-personality.sh | 6 +++---
 pom.xml  | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7953daa5/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 24f2ef5..f70c51b 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -262,15 +262,15 @@ function hadoopcheck_rebuild
   # See the Hadoop section on prereqs in the HBase Reference Guide
   if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
-    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions=""
   elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   else # master or a feature branch
     yetus_info "setting Hadoop versions to test based on master/feature branch rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   fi
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/7953daa5/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 073b668..ad574f7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1233,7 +1233,7 @@
 3.0.3
 ${compileSource}
 
-    <hadoop-two.version>2.5.1</hadoop-two.version>
+    <hadoop-two.version>2.7.4</hadoop-two.version>
 
 
${hadoop-two.version}
 3.0.0-SNAPSHOT



[8/8] hbase git commit: HBASE-18993 Backport patches in HBASE-18410 to branch-1.x branches

2017-11-07 Thread apurtell
HBASE-18993 Backport patches in HBASE-18410 to branch-1.x branches

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/264cf0d4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/264cf0d4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/264cf0d4

Branch: refs/heads/branch-1
Commit: 264cf0d4739ae9ad4343755d654efd73a9e623e0
Parents: c5ab70d
Author: huzheng 
Authored: Thu Oct 19 21:51:18 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 14:00:27 2017 -0800

--
 .../org/apache/hadoop/hbase/filter/Filter.java  |  14 +-
 .../apache/hadoop/hbase/filter/FilterList.java  | 421 +++-
 .../hadoop/hbase/filter/FilterListBase.java | 177 +
 .../hadoop/hbase/filter/FilterListWithAND.java  | 258 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 380 +++
 .../hadoop/hbase/filter/TestFilterList.java | 658 +++
 .../hbase/filter/TestFilterListOnMini.java  |   6 +-
 7 files changed, 1464 insertions(+), 450 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/264cf0d4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 0a7a184..a22b5c7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -166,15 +166,23 @@ public abstract class Filter {
  */
 NEXT_COL,
 /**
- * Done with columns, skip to next row. Note that filterRow() will
- * still be called.
+ * Seek to next row in current family. It may still pass a cell whose family is different but
+ * row is the same as previous cell to {@link #filterKeyValue(Cell)}, even if we get a NEXT_ROW
+ * returned for previous cell. For more details see HBASE-18368.
+ * Once reset() method was invoked, then we switch to the next row for all family, and you can
+ * catch the event by invoking CellUtils.matchingRows(previousCell, currentCell).
+ * Note that filterRow() will still be called.
  */
 NEXT_ROW,
 /**
  * Seek to next key which is given as hint by the filter.
  */
 SEEK_NEXT_USING_HINT,
-}
+/**
+ * Include KeyValue and done with row, seek to next. See NEXT_ROW
+ */
+INCLUDE_AND_SEEK_NEXT_ROW,
+  }
 
   /**
* Chance to alter the list of Cells to be submitted. Modifications to the list will carry on
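
To make the revised NEXT_ROW contract concrete, here is a hypothetical filter (not part of this patch) that keeps only the first included cell of each row. reset() marks the true row boundary, while NEXT_ROW is scoped to the current column family; the new INCLUDE_AND_SEEK_NEXT_ROW behaves the same way but also includes the current cell:

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.filter.FilterBase;

public class FirstCellPerRowFilter extends FilterBase {
  private boolean seen = false;

  @Override
  public void reset() throws IOException {
    // Invoked when the scan switches rows across all families.
    seen = false;
  }

  @Override
  public ReturnCode filterKeyValue(Cell cell) throws IOException {
    if (seen) {
      // Cells from another family of the same row may still arrive here
      // after NEXT_ROW, exactly as the new javadoc warns.
      return ReturnCode.NEXT_ROW;
    }
    seen = true;
    return ReturnCode.INCLUDE;
  }
}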

http://git-wip-us.apache.org/repos/asf/hbase/blob/264cf0d4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index be22e5d..0b39b56 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,123 +21,99 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
+import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
- * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL}
- * (AND) or {@link Operator#MUST_PASS_ONE} (OR).
- * Since you can use Filter Lists as children of Filter Lists, you can create a
- * hierarchy of filters to be evaluated.
- *
- *
- * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon as one filter does
- * not include the KeyValue.
- *
- *
- * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always evaluated.
- *
- *
+ * Implementation of {@link Filter} that represents an ordered List of Filters which will be
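
As a usage reminder while this javadoc is being rewritten, a minimal sketch of composing a FilterList; the row prefix is an illustrative value, not from the patch:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListSketch {
  static Scan buildScan() {
    // MUST_PASS_ALL short-circuits: the KeyOnlyFilter is not consulted for a
    // cell that the PrefixFilter has already excluded.
    FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new PrefixFilter(Bytes.toBytes("row-")),
        new KeyOnlyFilter());
    return new Scan().setFilter(list);
  }
}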

[4/8] hbase git commit: HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)

2017-11-07 Thread apurtell
HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e3bd4753
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e3bd4753
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e3bd4753

Branch: refs/heads/branch-2
Commit: e3bd47531797801a0c97226849de9f29e37361f1
Parents: fa3cc6c
Author: Andrew Purtell 
Authored: Tue Nov 7 11:37:02 2017 -0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 12:37:53 2017 -0800

--
 dev-support/hbase-personality.sh | 6 +++---
 pom.xml  | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e3bd4753/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index dcf4f7a..d0d81c6 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -262,15 +262,15 @@ function hadoopcheck_rebuild
   # See the Hadoop section on prereqs in the HBase Reference Guide
   if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
-    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions=""
   elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   else # master or a feature branch
     yetus_info "setting Hadoop versions to test based on master/feature branch rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   fi
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e3bd4753/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 027f6df..e4a040f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1339,7 +1339,7 @@
 3.0.4
 ${compileSource}
 
-    <hadoop-two.version>2.7.1</hadoop-two.version>
+    <hadoop-two.version>2.7.4</hadoop-two.version>
     <hadoop-three.version>3.0.0-alpha4</hadoop-three.version>
 

[6/8] hbase git commit: HBASE-18993 Backport patches in HBASE-18410 to branch-1.x branches

2017-11-07 Thread apurtell
HBASE-18993 Backport patches in HBASE-18410 to branch-1.x branches

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0a6e9380
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0a6e9380
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0a6e9380

Branch: refs/heads/branch-1.4
Commit: 0a6e93801440f7662817a87fa2c7d8928de40449
Parents: 7953daa
Author: huzheng 
Authored: Thu Oct 19 21:51:18 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 14:00:11 2017 -0800

--
 .../org/apache/hadoop/hbase/filter/Filter.java  |  14 +-
 .../apache/hadoop/hbase/filter/FilterList.java  | 421 +++-
 .../hadoop/hbase/filter/FilterListBase.java | 177 +
 .../hadoop/hbase/filter/FilterListWithAND.java  | 258 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 380 +++
 .../hadoop/hbase/filter/TestFilterList.java | 658 +++
 .../hbase/filter/TestFilterListOnMini.java  |   6 +-
 7 files changed, 1464 insertions(+), 450 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0a6e9380/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 0a7a184..a22b5c7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -166,15 +166,23 @@ public abstract class Filter {
  */
 NEXT_COL,
 /**
- * Done with columns, skip to next row. Note that filterRow() will
- * still be called.
+ * Seek to next row in current family. It may still pass a cell whose family is different but
+ * row is the same as previous cell to {@link #filterKeyValue(Cell)}, even if we get a NEXT_ROW
+ * returned for previous cell. For more details see HBASE-18368.
+ * Once reset() method was invoked, then we switch to the next row for all family, and you can
+ * catch the event by invoking CellUtils.matchingRows(previousCell, currentCell).
+ * Note that filterRow() will still be called.
  */
 NEXT_ROW,
 /**
  * Seek to next key which is given as hint by the filter.
  */
 SEEK_NEXT_USING_HINT,
-}
+/**
+ * Include KeyValue and done with row, seek to next. See NEXT_ROW
+ */
+INCLUDE_AND_SEEK_NEXT_ROW,
+  }
 
   /**
* Chance to alter the list of Cells to be submitted. Modifications to the list will carry on

http://git-wip-us.apache.org/repos/asf/hbase/blob/0a6e9380/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index be22e5d..0b39b56 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,123 +21,99 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
+import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
- * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL}
- * (AND) or {@link Operator#MUST_PASS_ONE} (OR).
- * Since you can use Filter Lists as children of Filter Lists, you can create a
- * hierarchy of filters to be evaluated.
- *
- *
- * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon as one filter does
- * not include the KeyValue.
- *
- *
- * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always evaluated.
- *
- *
+ * Implementation of {@link Filter} that represents an ordered List of Filters which will b

[5/8] hbase git commit: HBASE-18993 Backport patches in HBASE-18410 to branch-1.x branches

2017-11-07 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/0a6e9380/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 6ddc422..77fc47e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -18,44 +18,37 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import com.google.common.collect.Lists;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNull;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.collect.Lists;
-
-/**
- * Tests filter sets
- *
- */
-@Category(SmallTests.class)
+@Category({FilterTests.class, SmallTests.class})
 public class TestFilterList {
   static final int MAX_PAGES = 2;
-  static final char FIRST_CHAR = 'a';
-  static final char LAST_CHAR = 'e';
-  static byte[] GOOD_BYTES = Bytes.toBytes("abc");
-  static byte[] BAD_BYTES = Bytes.toBytes("def");
-
 
   @Test
   public void testAddFilter() throws Exception {
@@ -74,9 +67,49 @@ public class TestFilterList {
 filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(filter1, filter2));
 filterList.addFilter(new FirstKeyOnlyFilter());
 
+filterList.setReversed(false);
+FirstKeyOnlyFilter f = new FirstKeyOnlyFilter();
+f.setReversed(true);
+try {
+  filterList.addFilter(f);
+  fail("The IllegalArgumentException should be thrown because the added 
filter is reversed");
+} catch (IllegalArgumentException e) {
+}
+
   }
 
+  @Test
+  public void testConstruction() {
+FirstKeyOnlyFilter f1 = new FirstKeyOnlyFilter();
+FirstKeyOnlyFilter f2 = new FirstKeyOnlyFilter();
+f1.setReversed(true);
+f2.setReversed(false);
+
+try {
+  FilterList ff = new FilterList(f1, f2);
+  fail("The IllegalArgumentException should be thrown");
+} catch (IllegalArgumentException e) {
+}
+
+try {
+  FilterList ff = new FilterList(Arrays.asList((Filter) f1, (Filter) f2));
+  fail("The IllegalArgumentException should be thrown because the added 
filter is reversed");
+} catch (IllegalArgumentException e) {
+}
+
+try {
+  FilterList ff = new FilterList(FilterList.Operator.MUST_PASS_ALL,
+  Arrays.asList((Filter) f1, (Filter) f2));
+  fail("The IllegalArgumentException should be thrown because the added 
filter is reversed");
+} catch (IllegalArgumentException e) {
+}
 
+try {
+  FilterList ff = new FilterList(FilterList.Operator.MUST_PASS_ALL, f1, f2);
+  fail("The IllegalArgumentException should be thrown because the added filter is reversed");
+} catch (IllegalArgumentException e) {
+}
+  }
   /**
* Test "must pass one"
* @throws Exception
@@ -90,9 +123,7 @@ public class TestFilterList {
 List<Filter> filters = new ArrayList<Filter>();
 filters.add(new PageFilter(MAX_PAGES));
 filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
-Filter filterMPONE =
-  new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
-return filterMPONE;
+return new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
   }
 
   private void mpOneTest(Filter filterMPONE) throws Exception {
@@ -112,11 +143,10 @@ public class TestFilterList {
 assertFalse(filterMPONE.filterAllRemaining());
 
 /* Will pass both */
-byte [] rowkey = Bytes.toBytes("y");
+byte[] rowkey = Bytes.toBytes("y");
 for (int i = 0; i < MAX_PAGES - 1; i++) {
   assertFalse(f

[7/8] hbase git commit: HBASE-18993 Backport patches in HBASE-18410 to branch-1.x branches

2017-11-07 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/264cf0d4/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 6ddc422..77fc47e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -18,44 +18,37 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import com.google.common.collect.Lists;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNull;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.common.collect.Lists;
-
-/**
- * Tests filter sets
- *
- */
-@Category(SmallTests.class)
+@Category({FilterTests.class, SmallTests.class})
 public class TestFilterList {
   static final int MAX_PAGES = 2;
-  static final char FIRST_CHAR = 'a';
-  static final char LAST_CHAR = 'e';
-  static byte[] GOOD_BYTES = Bytes.toBytes("abc");
-  static byte[] BAD_BYTES = Bytes.toBytes("def");
-
 
   @Test
   public void testAddFilter() throws Exception {
@@ -74,9 +67,49 @@ public class TestFilterList {
 filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(filter1, filter2));
 filterList.addFilter(new FirstKeyOnlyFilter());
 
+filterList.setReversed(false);
+FirstKeyOnlyFilter f = new FirstKeyOnlyFilter();
+f.setReversed(true);
+try {
+  filterList.addFilter(f);
+  fail("The IllegalArgumentException should be thrown because the added 
filter is reversed");
+} catch (IllegalArgumentException e) {
+}
+
   }
 
+  @Test
+  public void testConstruction() {
+FirstKeyOnlyFilter f1 = new FirstKeyOnlyFilter();
+FirstKeyOnlyFilter f2 = new FirstKeyOnlyFilter();
+f1.setReversed(true);
+f2.setReversed(false);
+
+try {
+  FilterList ff = new FilterList(f1, f2);
+  fail("The IllegalArgumentException should be thrown");
+} catch (IllegalArgumentException e) {
+}
+
+try {
+  FilterList ff = new FilterList(Arrays.asList((Filter) f1, (Filter) f2));
+  fail("The IllegalArgumentException should be thrown because the added 
filter is reversed");
+} catch (IllegalArgumentException e) {
+}
+
+try {
+  FilterList ff = new FilterList(FilterList.Operator.MUST_PASS_ALL,
+  Arrays.asList((Filter) f1, (Filter) f2));
+  fail("The IllegalArgumentException should be thrown because the added 
filter is reversed");
+} catch (IllegalArgumentException e) {
+}
 
+try {
+  FilterList ff = new FilterList(FilterList.Operator.MUST_PASS_ALL, f1, f2);
+  fail("The IllegalArgumentException should be thrown because the added filter is reversed");
+} catch (IllegalArgumentException e) {
+}
+  }
   /**
* Test "must pass one"
* @throws Exception
@@ -90,9 +123,7 @@ public class TestFilterList {
 List<Filter> filters = new ArrayList<Filter>();
 filters.add(new PageFilter(MAX_PAGES));
 filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
-Filter filterMPONE =
-  new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
-return filterMPONE;
+return new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
   }
 
   private void mpOneTest(Filter filterMPONE) throws Exception {
@@ -112,11 +143,10 @@ public class TestFilterList {
 assertFalse(filterMPONE.filterAllRemaining());
 
 /* Will pass both */
-byte [] rowkey = Bytes.toBytes("y");
+byte[] rowkey = Bytes.toBytes("y");
 for (int i = 0; i < MAX_PAGES - 1; i++) {
   assertFalse(f

[3/8] hbase git commit: HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)

2017-11-07 Thread apurtell
HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/46408e02
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/46408e02
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/46408e02

Branch: refs/heads/master
Commit: 46408e021d73abb648914a0465318516cf8eef21
Parents: 4eae5a2
Author: Andrew Purtell 
Authored: Tue Nov 7 11:37:02 2017 -0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 12:37:36 2017 -0800

--
 dev-support/hbase-personality.sh | 6 +++---
 pom.xml  | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/46408e02/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index dcf4f7a..d0d81c6 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -262,15 +262,15 @@ function hadoopcheck_rebuild
   # See the Hadoop section on prereqs in the HBase Reference Guide
   if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
-    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions=""
   elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   else # master or a feature branch
     yetus_info "setting Hadoop versions to test based on master/feature branch rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   fi
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/46408e02/pom.xml
--
diff --git a/pom.xml b/pom.xml
index ffc25fa..8672d8b 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1394,7 +1394,7 @@
 3.0.4
 ${compileSource}
 
-    <hadoop-two.version>2.7.1</hadoop-two.version>
+    <hadoop-two.version>2.7.4</hadoop-two.version>
     <hadoop-three.version>3.0.0-alpha4</hadoop-three.version>
 

[1/8] hbase git commit: HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)

2017-11-07 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 045c02d17 -> 264cf0d47
  refs/heads/branch-1.4 546a2368e -> 0a6e93801
  refs/heads/branch-2 fa3cc6c3b -> e3bd47531
  refs/heads/master 4eae5a297 -> 46408e021


HBASE-19203 Update Hadoop version used for build to 2.7.4 (from 2.5.1)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c5ab70d1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c5ab70d1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c5ab70d1

Branch: refs/heads/branch-1
Commit: c5ab70d19353fc916b9edd739945875b5d081d85
Parents: 045c02d
Author: Andrew Purtell 
Authored: Tue Nov 7 11:37:02 2017 -0800
Committer: Andrew Purtell 
Committed: Tue Nov 7 12:35:46 2017 -0800

--
 dev-support/hbase-personality.sh | 6 +++---
 pom.xml  | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c5ab70d1/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 24f2ef5..f70c51b 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -262,15 +262,15 @@ function hadoopcheck_rebuild
   # See the Hadoop section on prereqs in the HBase Reference Guide
   if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
-    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions=""
   elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
     yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   else # master or a feature branch
     yetus_info "setting Hadoop versions to test based on master/feature branch rules."
-    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+    hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
     hbase_hadoop3_versions="3.0.0-alpha4"
   fi
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c5ab70d1/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 2076782..83a0ae5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1233,7 +1233,7 @@
 3.0.3
 ${compileSource}
 
-    <hadoop-two.version>2.5.1</hadoop-two.version>
+    <hadoop-two.version>2.7.4</hadoop-two.version>
 
 
${hadoop-two.version}
 3.0.0-SNAPSHOT



[02/15] hbase git commit: Change timezone for Ashu Pachauri in pom.xml

2017-11-07 Thread busbey
Change timezone for Ashu Pachauri in pom.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bc3f3ee3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bc3f3ee3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bc3f3ee3

Branch: refs/heads/HBASE-19189
Commit: bc3f3ee3bc43b3c14d61806f799382c9c06a49d6
Parents: c463e9c
Author: Ashu Pachauri 
Authored: Mon Nov 6 12:17:41 2017 +0530
Committer: Ashu Pachauri 
Committed: Mon Nov 6 12:17:41 2017 +0530

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bc3f3ee3/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 8117583..ffc25fa 100755
--- a/pom.xml
+++ b/pom.xml
@@ -215,7 +215,7 @@
      <id>ashu</id>
      <name>Ashu Pachauri</name>
      <email>a...@apache.org</email>
-      <timezone>-8</timezone>
+      <timezone>+5</timezone>
    </developer>
    <developer>
      <id>binlijin</id>



[06/15] hbase git commit: HBASE-19131 (Addendum) Use the emptyList() to replace EMPTY_LIST

2017-11-07 Thread busbey
HBASE-19131 (Addendum) Use the emptyList() to replace EMPTY_LIST


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/33ede551
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/33ede551
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/33ede551

Branch: refs/heads/HBASE-19189
Commit: 33ede55164421b40c0bfe1c9d47c1db6701265c2
Parents: 9ee8e27
Author: Chia-Ping Tsai 
Authored: Tue Nov 7 04:06:00 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Nov 7 04:06:00 2017 +0800

--
 .../src/main/java/org/apache/hadoop/hbase/ClusterStatus.java   | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/33ede551/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index 13c5bac..351b0c8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -138,7 +138,7 @@ public class ClusterStatus {
*/
  public List<ServerName> getDeadServerNames() {
 if (deadServers == null) {
-  return Collections.EMPTY_LIST;
+  return Collections.emptyList();
 }
 return Collections.unmodifiableList(deadServers);
   }
@@ -256,7 +256,7 @@ public class ClusterStatus {
 
  public Collection<ServerName> getServers() {
 if (liveServers == null) {
-  return Collections.EMPTY_LIST;
+  return Collections.emptyList();
 }
 return Collections.unmodifiableCollection(this.liveServers.keySet());
   }
@@ -281,7 +281,7 @@ public class ClusterStatus {
*/
  public List<ServerName> getBackupMasters() {
 if (backupMasters == null) {
-  return Collections.EMPTY_LIST;
+  return Collections.emptyList();
 }
 return Collections.unmodifiableList(this.backupMasters);
   }
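
For reference, the difference is type safety rather than behavior; a minimal sketch:

import java.util.Collections;
import java.util.List;

public class EmptyListSketch {
  public static void main(String[] args) {
    // The raw constant needs an unchecked conversion to bind to List<String>.
    @SuppressWarnings("unchecked")
    List<String> raw = Collections.EMPTY_LIST;
    // The generic factory infers the element type; no warning, same immutable instance.
    List<String> typed = Collections.emptyList();
    System.out.println(raw.isEmpty() && typed.isEmpty()); // prints true
  }
}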



[15/15] hbase git commit: HBASE-19189 Ad-hoc test job for running a subset of tests lots of times

2017-11-07 Thread busbey
HBASE-19189 Ad-hoc test job for running a subset of tests lots of times


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5339d25b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5339d25b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5339d25b

Branch: refs/heads/HBASE-19189
Commit: 5339d25b4b7c3f8f136ba8c72c299598eff87f4b
Parents: 4eae5a2
Author: Sean Busbey 
Authored: Mon Nov 6 13:48:05 2017 -0600
Committer: Sean Busbey 
Committed: Tue Nov 7 16:46:58 2017 -0600

--
 dev-support/adhoc_run_tests/Jenkinsfile| 93 +
 dev-support/adhoc_run_tests/adhoc_run_tests.sh | 90 
 dev-support/gather_machine_environment.sh  | 50 +++
 3 files changed, 233 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5339d25b/dev-support/adhoc_run_tests/Jenkinsfile
--
diff --git a/dev-support/adhoc_run_tests/Jenkinsfile b/dev-support/adhoc_run_tests/Jenkinsfile
new file mode 100644
index 0000000..b3793cd
--- /dev/null
+++ b/dev-support/adhoc_run_tests/Jenkinsfile
@@ -0,0 +1,93 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  parameters {
+string(name: 'tests', description: 'space separated list of tests to run. e.g. ' +
+'TestLogRollingNoCluster TestMetricRegistryImpl TestConstraints')
+string(name: 'node', defaultValue: 'Hadoop',
+description: 'the node label that should be used to run the test.')
+string(name: 'repeat_count', defaultValue: '100',
+description: 'number of iterations to run looking for a failure.')
+string(name: 'fork_count', defaultValue: '0.5C', description: '''
+Given to surefire to set the number of parallel forks for a given test attempt (i.e. one
+maven invocation that has all of the specified tests). The default tries to use half of the
+available cores on the system.
+
+For more information see
+<a href="http://maven.apache.org/surefire/maven-surefire-plugin/test-mojo.html#forkCount">
+the surefire docs on the forkCount parameter</a>
+''')
+  }
+  agent {
+node {
+  label "${params.node}"
+}
+  }
+  options {
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+  }
+  stages {
+stage ('run tests') {
+  tools {
+maven 'Maven (latest)'
+// this needs to be set to the jdk that ought to be used to build 
releases on the branch
+// the Jenkinsfile is stored in.
+jdk "JDK 1.8 (latest)"
+  }
+  steps {
+sh """#!/bin/bash -e
+  echo "Setting up directories"
+  rm -rf "${env.OUTPUTDIR}" && mkdir "${env.OUTPUTDIR}"
+  rm -rf ".m2-repo" && mkdir ".m2-repo"
+  mkdir "${env.OUTPUTDIR}/machine"
+"""
+sh """#!/bin/bash -e
+  "${env.BASEDIR}/dev-support/gather_machine_environment.sh" \
+  "${OUTPUT_RELATIVE}/machine"
+"""
+dir ("component") {
+  sh '''#!/bin/bash -e
+./dev-support/adhoc_run_tests/adhoc_run_tests.sh \
+--force-timeout 1800 \
+--maven-local-repo ".m2-repo" \
+--log-output "${OUTPUTDIR}" \
+--surefire-fork-count "${fork_count}" \
+--repeat "${repeat_count}" \
+"${tests}"
+'''
+}
+  }
+  post {
+always {
+  archive 'output/*'
+  archive 'output/**/*'
+}
+failure {
+  archive 'component/**/target/surefire-reports/*'
+}
+  }
+}
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/5339d25b/dev-support/adhoc_run_tests/adhoc_run_tests.sh
--
di

[11/15] hbase git commit: HBASE-19183 Removed redundant groupId from hbase-checkstyle and hbase-error-prone

2017-11-07 Thread busbey
HBASE-19183 Removed redundant groupId from hbase-checkstyle and 
hbase-error-prone


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d4e3f902
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d4e3f902
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d4e3f902

Branch: refs/heads/HBASE-19189
Commit: d4e3f902e6ba5b747295ca6053f34badd4018175
Parents: 0356674
Author: Jan Hentschel 
Authored: Sat Nov 4 23:01:40 2017 +0100
Committer: Jan Hentschel 
Committed: Tue Nov 7 08:20:51 2017 +0100

--
 hbase-build-support/hbase-error-prone/pom.xml | 1 -
 hbase-checkstyle/pom.xml  | 1 -
 2 files changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d4e3f902/hbase-build-support/hbase-error-prone/pom.xml
--
diff --git a/hbase-build-support/hbase-error-prone/pom.xml 
b/hbase-build-support/hbase-error-prone/pom.xml
index 907d82d..067e154 100644
--- a/hbase-build-support/hbase-error-prone/pom.xml
+++ b/hbase-build-support/hbase-error-prone/pom.xml
@@ -26,7 +26,6 @@
    <version>3.0.0-SNAPSHOT</version>
    <relativePath>..</relativePath>
  </parent>
-  <groupId>org.apache.hbase</groupId>
  <artifactId>hbase-error-prone</artifactId>
  <version>3.0.0-SNAPSHOT</version>
  <name>Apache HBase - Error Prone Rules</name>

http://git-wip-us.apache.org/repos/asf/hbase/blob/d4e3f902/hbase-checkstyle/pom.xml
--
diff --git a/hbase-checkstyle/pom.xml b/hbase-checkstyle/pom.xml
index ed84b20..2b30c12 100644
--- a/hbase-checkstyle/pom.xml
+++ b/hbase-checkstyle/pom.xml
@@ -22,7 +22,6 @@
 */
 -->
  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hbase</groupId>
  <artifactId>hbase-checkstyle</artifactId>
  <version>3.0.0-SNAPSHOT</version>
  <name>Apache HBase - Checkstyle</name>



[08/15] hbase git commit: HBASE-19186 Unify to use bytes to show size in master/rs ui

2017-11-07 Thread busbey
HBASE-19186 Unify to use bytes to show size in master/rs ui


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b6011a16
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b6011a16
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b6011a16

Branch: refs/heads/HBASE-19189
Commit: b6011a16fffebae21e56c41206b29d96c0613024
Parents: 2a99b87
Author: Guanghao Zhang 
Authored: Sun Nov 5 12:41:02 2017 +0800
Committer: Guanghao Zhang 
Committed: Tue Nov 7 10:07:03 2017 +0800

--
 .../tmpl/regionserver/BlockCacheTmpl.jamon  |   4 +-
 .../tmpl/regionserver/ServerMetricsTmpl.jamon   |  10 +-
 .../hbase-webapps/master/procedures.jsp |   9 +-
 .../hbase-webapps/master/processMaster.jsp  |   9 +-
 .../hbase-webapps/master/processRS.jsp  | 228 ---
 .../resources/hbase-webapps/master/table.jsp|   2 +-
 .../hbase-webapps/regionserver/processRS.jsp|   9 +-
 7 files changed, 23 insertions(+), 248 deletions(-)
--
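The change standardizes on Hadoop's TraditionalBinaryPrefix for rendering
byte counts in the UI, which is why the "(bytes)" suffixes in the headers
become redundant. A minimal sketch of the conversion it relies on (the
sample value is illustrative):

import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;

public class SizeRendering {
  public static void main(String[] args) {
    long bytes = 64L * 1024 * 1024; // 64 MiB, an illustrative value
    // Renders the raw byte count with a binary prefix, e.g. "64 MB",
    // matching what the jamon templates and JSPs above now emit.
    System.out.println(TraditionalBinaryPrefix.long2String(bytes, "B", 1));
  }
}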


http://git-wip-us.apache.org/repos/asf/hbase/blob/b6011a16/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
index b4e44d8..5ea5bcc 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
@@ -244,13 +244,13 @@ 
org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 Size
 <% 
TraditionalBinaryPrefix.long2String(cacheConfig.getBlockCache().getCurrentSize(),
 "B", 1) %>
-Current size of block cache in use (bytes)
+Current size of block cache in use
 
 
 Free
 <% 
TraditionalBinaryPrefix.long2String(cacheConfig.getBlockCache().getFreeSize(),
 "B", 1) %>
-The total free memory currently available to store more cache 
entries (bytes)
+The total free memory currently available to store more cache 
entries
 
 
 Count

http://git-wip-us.apache.org/repos/asf/hbase/blob/b6011a16/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
index 2e99d5b..adcfff1 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/ServerMetricsTmpl.jamon
@@ -146,7 +146,7 @@ MetricsRegionServerWrapper mWrap;
 
 
 Num. WAL Files
-Size. WAL Files (bytes)
+Size. WAL Files
 
 
 
@@ -165,9 +165,9 @@ MetricsRegionServerWrapper mWrap;
 
 Num. Stores
 Num. Storefiles
-Root Index Size (bytes)
-Index Size (bytes)
-Bloom Size (bytes)
+Root Index Size
+Index Size
+Bloom Size
 
 
 <% mWrap.getNumStores() %>
@@ -212,7 +212,7 @@ MetricsHBaseServerWrapper mServerWrap;
 Priority Call Queue Length
 General Call Queue Length
 Replication Call Queue Length
-Total Call Queue Size (bytes)
+Total Call Queue Size
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b6011a16/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
--
diff --git 
a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
index 63a41cc..c3df296 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
@@ -39,6 +39,7 @@
   import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
   import="org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos"
   import="org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil"
+  import="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix"
 %>
 <%
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
@@ -173,7 +174,7 @@
 <%ProcedureWALFile pwf = procedureWALFiles.get(i); %>
 
<%= pwf.getLogId() %>
-   <%= StringUtils.humanSize(pwf.getSize()) %> 
+   <%= TraditionalBinaryPrefix.long2String(pwf.getSize(), "B", 
1) %> 
<%= new Date(

[04/15] hbase git commit: HBASE-18950 Remove Optional parameters in AsyncAdmin interface

2017-11-07 Thread busbey
HBASE-18950 Remove Optional parameters in AsyncAdmin interface


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/888f2335
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/888f2335
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/888f2335

Branch: refs/heads/HBASE-19189
Commit: 888f2335c952040646ce820f6191f6433ec9411d
Parents: bc3f3ee
Author: Guanghao Zhang 
Authored: Mon Oct 23 11:22:00 2017 +0800
Committer: Guanghao Zhang 
Committed: Mon Nov 6 20:30:59 2017 +0800

--
 .../apache/hadoop/hbase/client/AsyncAdmin.java  | 132 ++---
 .../hadoop/hbase/client/AsyncHBaseAdmin.java| 114 -
 .../client/AsyncRpcRetryingCallerFactory.java   |   8 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   6 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 477 +--
 .../hbase/shaded/protobuf/ProtobufUtil.java |  11 +-
 .../hbase/shaded/protobuf/RequestConverter.java | 403 ++--
 ...gionServerBulkLoadWithOldSecureEndpoint.java |   3 +-
 .../hadoop/hbase/client/TestAsyncAdminBase.java |  18 +-
 .../hbase/client/TestAsyncClusterAdminApi.java  |   3 +-
 .../hbase/client/TestAsyncRegionAdminApi.java   |  23 +-
 .../hbase/client/TestAsyncSnapshotAdminApi.java |  12 +-
 .../hbase/client/TestAsyncTableAdminApi.java|  22 +-
 .../hbase/coprocessor/TestMasterObserver.java   |   9 +-
 .../regionserver/TestHRegionServerBulkLoad.java |   2 +-
 .../TestHRegionServerBulkLoadWithOldClient.java |   3 +-
 16 files changed, 713 insertions(+), 533 deletions(-)
--
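A sketch of how caller code changes now that the Optional parameters are
gone; the pattern string and wrapper method are illustrative, while the
listTables overloads are the ones introduced in this patch:

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Pattern;

import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class ListTablesMigration {
  // 'admin' is assumed to come from an established AsyncConnection.
  static void listTables(AsyncAdmin admin) {
    // Before: admin.listTables(Optional.empty(), false)
    CompletableFuture<List<TableDescriptor>> user = admin.listTables(false);
    // Before: admin.listTables(Optional.of(pattern), false)
    CompletableFuture<List<TableDescriptor>> matching =
        admin.listTables(Pattern.compile("test.*"), false);
    matching.thenAccept(tables ->
        tables.forEach(t -> System.out.println(t.getTableName())));
  }
}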


http://git-wip-us.apache.org/repos/asf/hbase/blob/888f2335/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 8fe02b9..baae6cf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -64,38 +64,49 @@ public interface AsyncAdmin {
   /**
* List all the userspace tables.
* @return - returns a list of TableDescriptors wrapped by a {@link 
CompletableFuture}.
-   * @see #listTables(Optional, boolean)
*/
   default CompletableFuture> listTables() {
-return listTables(Optional.empty(), false);
+return listTables(false);
   }
 
   /**
+   * List all the tables.
+   * @param includeSysTables False to match only against userspace tables
+   * @return - returns a list of TableDescriptors wrapped by a {@link 
CompletableFuture}.
+   */
+  CompletableFuture> listTables(boolean 
includeSysTables);
+
+  /**
* List all the tables matching the given pattern.
* @param pattern The compiled regular expression to match against
* @param includeSysTables False to match only against userspace tables
* @return - returns a list of TableDescriptors wrapped by a {@link 
CompletableFuture}.
*/
-  CompletableFuture> listTables(Optional 
pattern,
-  boolean includeSysTables);
+  CompletableFuture> listTables(Pattern pattern, boolean 
includeSysTables);
 
   /**
* List all of the names of userspace tables.
* @return a list of table names wrapped by a {@link CompletableFuture}.
-   * @see #listTableNames(Optional, boolean)
+   * @see #listTableNames(Pattern, boolean)
*/
   default CompletableFuture> listTableNames() {
-return listTableNames(Optional.empty(), false);
+return listTableNames(false);
   }
 
   /**
+   * List all of the names of tables.
+   * @param includeSysTables False to match only against userspace tables
+   * @return a list of table names wrapped by a {@link CompletableFuture}.
+   */
+  CompletableFuture> listTableNames(boolean includeSysTables);
+
+  /**
* List all of the names of userspace tables.
* @param pattern The regular expression to match against
* @param includeSysTables False to match only against userspace tables
* @return a list of table names wrapped by a {@link CompletableFuture}.
*/
-  CompletableFuture> listTableNames(Optional pattern,
-  boolean includeSysTables);
+  CompletableFuture> listTableNames(Pattern pattern, boolean 
includeSysTables);
 
   /**
* Method for getting the tableDescriptor
@@ -108,9 +119,7 @@ public interface AsyncAdmin {
* Creates a new table.
* @param desc table descriptor for table
*/
-  default CompletableFuture createTable(TableDescriptor desc) {
-return createTable(desc, Optional.empty());
-  }
+  CompletableFuture createTable(TableDescriptor desc);
 
   /**
* Creates a new table with the specified number of regions. The start key 
specified will become
@@ -133,7 +142,7 @@ public interface AsyncAdmin {

[13/15] hbase git commit: HBASE-19174 Updated link to presentations to link to book

2017-11-07 Thread busbey
HBASE-19174 Updated link to presentations to link to book


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/29fd1dea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/29fd1dea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/29fd1dea

Branch: refs/heads/HBASE-19189
Commit: 29fd1dead227a6e72d29e5b5fc990a08a7c4bb05
Parents: 9d63bda
Author: Jan Hentschel 
Authored: Sat Nov 4 00:04:00 2017 +0100
Committer: Jan Hentschel 
Committed: Tue Nov 7 08:29:38 2017 +0100

--
 src/site/asciidoc/old_news.adoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/29fd1dea/src/site/asciidoc/old_news.adoc
--
diff --git a/src/site/asciidoc/old_news.adoc b/src/site/asciidoc/old_news.adoc
index c5cf993..4ae3d7a 100644
--- a/src/site/asciidoc/old_news.adoc
+++ b/src/site/asciidoc/old_news.adoc
@@ -113,7 +113,7 @@ October 2nd, 2009:: HBase at Hadoop World in NYC. A few of 
us will be talking on
 
 August 7th-9th, 2009:: HUG7 and HBase Hackathon at StumbleUpon in SF: Sign up 
for the:: link:http://www.meetup.com/hbaseusergroup/calendar/10950511/[HBase 
User Group Meeting, HUG7] or for the 
link:http://www.meetup.com/hackathon/calendar/10951718/[Hackathon] or for both 
(all are welcome!).
 
-June, 2009::  HBase at HadoopSummit2009 and at NOSQL: See the 
link:https://wiki.apache.org/hadoop/HBase/HBasePresentations[presentations]
+June, 2009::  HBase at HadoopSummit2009 and at NOSQL: See the 
link:https://hbase.apache.org/book.html#other.info.pres[presentations]
 
 March 3rd, 2009 :: HUG6 -- 
link:http://www.meetup.com/hbaseusergroup/calendar/9764004/[HBase User Group 6]
 



[07/15] hbase git commit: HBASE-19111 Add CellUtil#isPut and deprecate methods returning/expecting non public-api data

2017-11-07 Thread busbey
HBASE-19111 Add CellUtil#isPut and deprecate methods returning/expecting non 
public-api data

KeyValue.Type, and its corresponding byte value, are not public API. We
shouldn't have methods that expect them. Added a basic sanity test for
isPut and isDelete.

Signed-off-by: Ramkrishna 
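A minimal sketch of the new public-API checks; the classify() helper is
hypothetical, but isPut and isDelete are the methods added/used by this
patch:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;

public class CellTypeCheck {
  // Replaces switching on cell.getTypeByte(), which leaks the
  // non-public KeyValue.Type byte values into client code.
  static String classify(Cell cell) {
    if (CellUtil.isPut(cell)) {
      return "put";
    } else if (CellUtil.isDelete(cell)) {
      return "delete";
    }
    return "other";
  }
}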


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2a99b87a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2a99b87a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2a99b87a

Branch: refs/heads/HBASE-19189
Commit: 2a99b87af2ebe289e2fec94c9cdca0942397977d
Parents: 33ede55
Author: Josh Elser 
Authored: Fri Oct 27 19:27:59 2017 -0400
Committer: Josh Elser 
Committed: Mon Nov 6 15:37:12 2017 -0500

--
 .../main/java/org/apache/hadoop/hbase/Cell.java |  3 +
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  9 +++
 .../hadoop/hbase/client/TestFromClientSide.java | 73 +++-
 3 files changed, 66 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2a99b87a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
index b2f6304..f5833c8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
@@ -133,7 +133,10 @@ public interface Cell {
 
   /**
* @return The byte representation of the KeyValue.TYPE of this cell: one of 
Put, Delete, etc
+   * @deprecated since 2.0.0, use appropriate {@link CellUtil#isDelete} or
+   *{@link CellUtil#isPut(Cell)} methods instead. This will be removed in 
3.0.0.
*/
+  @Deprecated
   byte getTypeByte();
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2a99b87a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 78f12b5..52eb8fa 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -893,6 +893,7 @@ public final class CellUtil {
* {KeyValue.Type#DeleteFamily} or a
* {@link KeyValue.Type#DeleteColumn} KeyValue type.
*/
+  @SuppressWarnings("deprecation")
   public static boolean isDelete(final Cell cell) {
 return PrivateCellUtil.isDelete(cell.getTypeByte());
   }
@@ -962,6 +963,14 @@ public final class CellUtil {
   }
 
   /**
+   * @return True if this cell is a Put.
+   */
+  @SuppressWarnings("deprecation")
+  public static boolean isPut(Cell cell) {
+return cell.getTypeByte() == Type.Put.getCode();
+  }
+
+  /**
* Estimate based on keyvalue's serialization format in the RPC layer. Note 
that there is an extra
* SIZEOF_INT added to the size here that indicates the actual length of the 
cell for cases where
* cell's are serialized in a contiguous format (For eg in RPCs).

http://git-wip-us.apache.org/repos/asf/hbase/blob/2a99b87a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 804f821..02d3797 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -50,6 +50,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.ClusterStatus.Option;
 import org.apache.hadoop.hbase.CompareOperator;
@@ -132,9 +133,6 @@ public class TestFromClientSide {
   @Rule
   public TestName name = new TestName();
 
-  /**
-   * @throws java.lang.Exception
-   */
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
 // Uncomment the following lines if more verbosity is needed for
@@ -151,9 +149,6 @@ public class TestFromClientSide {
 TEST_UTIL.startMiniCluster(SLAVES);
   }
 
-  /**
-   * @throws java.lang.Exception
-   */
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
 TEST_UTIL.shutdownMiniCluster();
@@ -342,8 +337,6 @@ public class TestFromClientSide {
   /**
 

[01/15] hbase git commit: HBASE-19185 ClassNotFoundException: com.fasterxml.jackson.* [Forced Update!]

2017-11-07 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-19189 1b1ba46fb -> 5339d25b4 (forced update)


HBASE-19185 ClassNotFoundException: com.fasterxml.jackson.*


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c463e9c8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c463e9c8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c463e9c8

Branch: refs/heads/HBASE-19189
Commit: c463e9c8403645597141b18cb9d502623fa7f104
Parents: 28cdf4a
Author: Chia-Ping Tsai 
Authored: Sun Nov 5 23:30:28 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sun Nov 5 23:30:28 2017 +0800

--
 .../org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java   | 5 -
 1 file changed, 4 insertions(+), 1 deletion(-)
--
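For context, the classes added here feed TableMapReduceUtil's jar-shipping
during MapReduce job setup. A minimal driver sketch (the job name is
illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;

public class JobSetup {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Job job = Job.getInstance(conf, "hbase-mr-job");
    // Ships HBase's runtime dependencies with the job; after this patch
    // that includes jackson-databind, jackson-core and jackson-annotations,
    // so tasks no longer fail with ClassNotFoundException.
    TableMapReduceUtil.addDependencyJars(job);
  }
}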


http://git-wip-us.apache.org/repos/asf/hbase/blob/c463e9c8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index cf86184..40e2cb9 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -820,7 +820,10 @@ public class TableMapReduceUtil {
   org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists.class,
   org.apache.htrace.Trace.class,
   com.codahale.metrics.MetricRegistry.class,
-  org.apache.commons.lang3.ArrayUtils.class);
+  org.apache.commons.lang3.ArrayUtils.class,
+  com.fasterxml.jackson.databind.ObjectMapper.class,
+  com.fasterxml.jackson.core.Versioned.class,
+  com.fasterxml.jackson.annotation.JsonView.class);
   }
 
   /**



[03/15] hbase git commit: HBASE-18950 Remove Optional parameters in AsyncAdmin interface

2017-11-07 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/888f2335/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
index c3c4045..83ba244 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
@@ -21,10 +21,8 @@ import static 
org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER
 
 import java.util.Arrays;
 import java.util.List;
-import java.util.Optional;
-import java.util.concurrent.ExecutionException;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ForkJoinPool;
-import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
 import java.util.regex.Pattern;
 
@@ -41,8 +39,6 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameter;
 import org.junit.runners.Parameterized.Parameters;
 
@@ -106,7 +102,7 @@ public abstract class TestAsyncAdminBase {
 
   @After
   public void tearDown() throws Exception {
-
admin.listTableNames(Optional.of(Pattern.compile(tableName.getNameAsString() + 
".*")), false)
+admin.listTableNames(Pattern.compile(tableName.getNameAsString() + ".*"), 
false)
 .whenCompleteAsync((tables, err) -> {
   if (tables != null) {
 tables.forEach(table -> {
@@ -122,19 +118,21 @@ public abstract class TestAsyncAdminBase {
   }
 
   protected void createTableWithDefaultConf(TableName tableName) {
-createTableWithDefaultConf(tableName, Optional.empty());
+createTableWithDefaultConf(tableName, null);
   }
 
-  protected void createTableWithDefaultConf(TableName tableName, 
Optional splitKeys) {
+  protected void createTableWithDefaultConf(TableName tableName, byte[][] 
splitKeys) {
 createTableWithDefaultConf(tableName, splitKeys, FAMILY);
   }
 
-  protected void createTableWithDefaultConf(TableName tableName, 
Optional splitKeys,
+  protected void createTableWithDefaultConf(TableName tableName, byte[][] 
splitKeys,
   byte[]... families) {
 TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
 for (byte[] family : families) {
   builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
 }
-admin.createTable(builder.build(), splitKeys).join();
+CompletableFuture future = splitKeys == null ? 
admin.createTable(builder.build())
+: admin.createTable(builder.build(), splitKeys);
+future.join();
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/888f2335/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
index 53de2b5..e7c439b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
@@ -31,7 +31,6 @@ import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
@@ -254,7 +253,7 @@ public class TestAsyncClusterAdminApi extends 
TestAsyncAdminBase {
   List tableRegions = admin.getTableRegions(table).get();
   List regionLoads = Lists.newArrayList();
   for (ServerName serverName : servers) {
-regionLoads.addAll(admin.getRegionLoads(serverName, 
Optional.of(table)).get());
+regionLoads.addAll(admin.getRegionLoads(serverName, table).get());
   }
   checkRegionsAndRegionLoads(tableRegions, regionLoads);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/888f2335/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index 262cac6..1ee1b94 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -200,7 +200,7 @@ public class TestAsyncRegionAdminApi extends 
TestAsyncAdminBase

[14/15] hbase git commit: HBASE-18961 doMiniBatchMutate() is split into smaller member methods of BatchOperation and its sub-classes

2017-11-07 Thread busbey
HBASE-18961 doMiniBatchMutate() is split into smaller member methods of 
BatchOperation and its sub-classes

There is no functionality change except for the following:
* The variable lastIndexExclusive was getting incremented while locking rows
corresponding to input
  operations. As a result, when the getRowLockInternal() method threw
TimeoutIOException, only operations
  in the range [nextIndexToProcess, lastIndexExclusive) were marked as
FAILED before raising the
  exception up the call stack. With these changes all operations are marked
as FAILED (see the sketch below).
* Cluster ids of the first mutation are used consistently for the entire
batch. The previous behavior
  was to use the cluster ids of the first mutation in each mini-batch.

Signed-off-by: Michael Stack 
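A simplified, hypothetical sketch of the failure-marking fix described
above; the real code lives in HRegion's batch machinery and uses
OperationStatus rather than this toy Status enum:

enum Status { NOT_RUN, FAILED }

class BatchFailureMarking {
  // After HBASE-18961: when acquiring a row lock times out, every
  // not-yet-run operation in the batch is marked FAILED before the
  // TimeoutIOException propagates, not just those below lastIndexExclusive.
  static void markRemainingFailed(Status[] retCodes, int nextIndexToProcess) {
    for (int i = nextIndexToProcess; i < retCodes.length; i++) {
      if (retCodes[i] == Status.NOT_RUN) {
        retCodes[i] = Status.FAILED;
      }
    }
  }
}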


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4eae5a29
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4eae5a29
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4eae5a29

Branch: refs/heads/HBASE-19189
Commit: 4eae5a29749da1c34f1a2dd0b1f6aa6f7a9bbffd
Parents: 29fd1de
Author: Umesh Agashe 
Authored: Sun Oct 8 00:31:12 2017 -0700
Committer: Michael Stack 
Committed: Tue Nov 7 10:00:49 2017 -0800

--
 .../org/apache/hadoop/hbase/util/NonceKey.java  |4 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 1328 ++
 .../MiniBatchOperationInProgress.java   |   44 +-
 .../regionserver/MultiRowMutationProcessor.java |2 +-
 .../TestMiniBatchOperationInProgress.java   |4 +-
 .../access/TestWithDisabledAuthorization.java   |2 +-
 6 files changed, 805 insertions(+), 579 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4eae5a29/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
index 6da808e..b658331 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
@@ -1,4 +1,4 @@
-/**
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.yetus.audience.InterfaceAudience;
 
  /**
@@ -31,7 +30,6 @@ public class NonceKey {
   private long nonce;
 
   public NonceKey(long group, long nonce) {
-assert nonce != HConstants.NO_NONCE;
 this.group = group;
 this.nonce = nonce;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4eae5a29/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 5cd27b8..82d4bd2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -161,6 +161,7 @@ import org.apache.hadoop.hbase.util.EncryptionTest;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HashedBytes;
+import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
 import org.apache.hadoop.hbase.util.Threads;
@@ -199,6 +200,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
 
+import edu.umd.cs.findbugs.annotations.Nullable;
+
 /**
  * Regions store data for a certain region of a table.  It stores all columns
  * for each row. A given table consists of one or more Regions.
@@ -642,7 +645,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   // flushPerChanges is to prevent too many changes in memstore
   private long flushPerChanges;
   private long blockingMemStoreSize;
-  final long threadWakeFrequency;
   // Used to guard closes
   final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
 
@@ -757,7 +759,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   }
 }
 this.rsServices = rsServices;
-this.threadWakeFrequency = conf.getLong(HConstants.THREAD_WAKE_FREQUENCY, 
10 * 1000);
 setHTableSpecificC

[10/15] hbase git commit: HBASE-19103 Add BigDecimalComparator for filter

2017-11-07 Thread busbey
HBASE-19103 Add BigDecimalComparator for filter

Signed-off-by: Jan Hentschel 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0356674c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0356674c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0356674c

Branch: refs/heads/HBASE-19189
Commit: 0356674cd1f59b10ab515058efa948e556fbc79e
Parents: d1b6d8c
Author: QilinCao 
Authored: Mon Oct 30 20:55:11 2017 +0800
Committer: Jan Hentschel 
Committed: Tue Nov 7 08:07:58 2017 +0100

--
 .../hbase/filter/BigDecimalComparator.java  | 116 ++
 .../src/main/protobuf/Comparator.proto  |   4 +
 .../src/main/protobuf/Comparator.proto  |   4 +
 .../hbase/filter/TestBigDecimalComparator.java  | 118 +++
 .../filter/TestComparatorSerialization.java |   9 ++
 .../hadoop/hbase/regionserver/TestHRegion.java  |  43 +++
 6 files changed, 294 insertions(+)
--
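A minimal usage sketch of the new comparator in a scan filter. The
family/qualifier names and threshold are illustrative, and the
CompareOp-based SingleColumnValueFilter constructor is assumed here:

import java.math.BigDecimal;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BigDecimalComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class BigDecimalFilterExample {
  // Keep only rows whose cf:price cell, decoded as a BigDecimal,
  // compares numerically greater than the threshold.
  public static Scan scanGreaterThan(BigDecimal threshold) {
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("price"),
        CompareOp.GREATER, new BigDecimalComparator(threshold));
    return new Scan().setFilter(filter);
  }
}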


http://git-wip-us.apache.org/repos/asf/hbase/blob/0356674c/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
new file mode 100644
index 000..5da366f
--- /dev/null
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
@@ -0,0 +1,116 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.util.Objects;
+
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * A BigDecimal comparator which numerical compares against the specified byte 
array
+ */
+@InterfaceAudience.Public
+public class BigDecimalComparator extends ByteArrayComparable {
+  private BigDecimal bigDecimal;
+
+  public BigDecimalComparator(BigDecimal value) {
+super(Bytes.toBytes(value));
+this.bigDecimal = value;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+if (obj == null || !(obj instanceof BigDecimalComparator)) {
+  return false;
+}
+if (this == obj) {
+  return true;
+}
+BigDecimalComparator bdc = (BigDecimalComparator) obj;
+return this.bigDecimal.equals(bdc.bigDecimal);
+  }
+
+  @Override
+  public int hashCode() {
+return Objects.hash(this.bigDecimal);
+  }
+
+  @Override
+  public int compareTo(byte[] value, int offset, int length) {
+BigDecimal that = Bytes.toBigDecimal(value, offset, length);
+return this.bigDecimal.compareTo(that);
+  }
+
+  @Override
+  public int compareTo(ByteBuffer value, int offset, int length) {
+BigDecimal that = ByteBufferUtils.toBigDecimal(value, offset, length);
+return this.bigDecimal.compareTo(that);
+  }
+
+  /**
+   * @return The comparator serialized using pb
+   */
+  @Override
+  public byte[] toByteArray() {
+ComparatorProtos.BigDecimalComparator.Builder builder =
+ComparatorProtos.BigDecimalComparator.newBuilder();
+builder.setComparable(ProtobufUtil.toByteArrayComparable(this.value));
+return builder.build().toByteArray();
+  }
+
+  /**
+   * @param pbBytes A pb serialized {@link BigDecimalComparator} instance
+   * @return An instance of {@link BigDecimalComparator} made from 
bytes
+   * @throws DeserializationException A deserialization exception
+   * @see #toByteArray
+   */
+  public static BigDecimalComparator parseFrom(final byte[] pbBytes)
+  throws DeserializationException {
+Compa

[12/15] hbase git commit: HBASE-19175 Added linklint files to gitignore

2017-11-07 Thread busbey
HBASE-19175 Added linklint files to gitignore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9d63bda8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9d63bda8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9d63bda8

Branch: refs/heads/HBASE-19189
Commit: 9d63bda8ff44764963ee3ed11eca3881037ff789
Parents: d4e3f90
Author: Jan Hentschel 
Authored: Sat Nov 4 01:33:30 2017 +0100
Committer: Jan Hentschel 
Committed: Tue Nov 7 08:25:48 2017 +0100

--
 .gitignore | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9d63bda8/.gitignore
--
diff --git a/.gitignore b/.gitignore
index b9c6fb2..405edc0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,6 @@ hbase-*/test
 *.ipr
 patchprocess/
 dependency-reduced-pom.xml
+link_report/
+linklint-*.zip
+linklint/



[09/15] hbase git commit: HBASE-19198 TestIPv6NIOServerSocketChannel fails; unable to bind

2017-11-07 Thread busbey
HBASE-19198 TestIPv6NIOServerSocketChannel fails; unable to bind


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d1b6d8c9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d1b6d8c9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d1b6d8c9

Branch: refs/heads/HBASE-19189
Commit: d1b6d8c90692d2ccf9a9e5c9c6186d62a0b2b553
Parents: b6011a1
Author: Michael Stack 
Authored: Mon Nov 6 21:19:51 2017 -0800
Committer: Michael Stack 
Committed: Mon Nov 6 21:20:04 2017 -0800

--
 .../apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java| 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--
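The fix narrows localhost resolution to the single address the host
actually reports. A small JDK-only sketch of the difference:

import java.net.InetAddress;
import java.net.UnknownHostException;

public class LocalhostResolution {
  public static void main(String[] args) throws UnknownHostException {
    // Before: getAllByName("localhost") can yield both an IPv4 and an
    // IPv6 address, and binding the IPv6 one fails on hosts without a
    // usable IPv6 stack.
    for (InetAddress addr : InetAddress.getAllByName("localhost")) {
      System.out.println("getAllByName: " + addr);
    }
    // After: the test binds only the address the host resolves to.
    System.out.println("getLocalHost: " + InetAddress.getLocalHost());
  }
}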


http://git-wip-us.apache.org/repos/asf/hbase/blob/d1b6d8c9/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
index d4f4ada..e63eaf2 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
@@ -49,7 +49,6 @@ import org.junit.rules.TestRule;
  */
 @Category({MiscTests.class, SmallTests.class})
 public class TestIPv6NIOServerSocketChannel {
-
   private static final Log LOG = 
LogFactory.getLog(TestIPv6NIOServerSocketChannel.class);
 
   @Rule
@@ -69,6 +68,7 @@ public class TestIPv6NIOServerSocketChannel {
 break;
   } catch (BindException ex) {
 //continue
+LOG.info("Failed on " + addr + ", inedAddr=" + inetAddr, ex);
   } finally {
 if (serverSocket != null) {
   serverSocket.close();
@@ -151,9 +151,9 @@ public class TestIPv6NIOServerSocketChannel {
*/
   @Test
   public void testServerSocketFromLocalhostResolution() throws IOException {
-InetAddress[] addrs = InetAddress.getAllByName("localhost");
+InetAddress[] addrs = {InetAddress.getLocalHost()};
 for (InetAddress addr : addrs) {
-  LOG.info("resolved localhost as:" + addr);
+  LOG.info("Resolved localhost as: " + addr);
   bindServerSocket(addr);
   bindNIOServerSocket(addr);
 }



[05/15] hbase git commit: HBASE-19160 expose CellComparator as IA.Public

2017-11-07 Thread busbey
HBASE-19160 expose CellComparator as IA.Public


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9ee8e271
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9ee8e271
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9ee8e271

Branch: refs/heads/HBASE-19189
Commit: 9ee8e2714df54345743ddf18bf23899872930b2c
Parents: 888f233
Author: Mike Drob 
Authored: Thu Nov 2 16:16:43 2017 -0500
Committer: Mike Drob 
Committed: Mon Nov 6 10:08:14 2017 -0600

--
 .../hadoop/hbase/client/ConnectionUtils.java|  4 ++--
 .../org/apache/hadoop/hbase/client/Result.java  |  5 ++--
 .../hadoop/hbase/filter/FilterListBase.java |  4 ++--
 .../hadoop/hbase/filter/FuzzyRowFilter.java |  6 ++---
 .../hbase/filter/InclusiveStopFilter.java   |  4 ++--
 .../org/apache/hadoop/hbase/CellComparator.java | 12 +-
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  2 +-
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  2 +-
 .../io/encoding/BufferedDataBlockEncoder.java   |  3 +--
 .../apache/hadoop/hbase/TestCellComparator.java | 13 ++-
 .../hadoop/hbase/util/RedundantKVGenerator.java |  6 ++---
 .../mapreduce/IntegrationTestImportTsv.java | 10 
 .../hadoop/hbase/mapreduce/CellSortReducer.java |  4 ++--
 .../hbase/mapreduce/HFileOutputFormat2.java |  6 ++---
 .../apache/hadoop/hbase/mapreduce/Import.java   |  6 ++---
 .../hadoop/hbase/mapreduce/PutSortReducer.java  |  4 ++--
 .../hadoop/hbase/mapreduce/SyncTable.java   |  8 +++
 .../hadoop/hbase/mapreduce/TextSortReducer.java |  4 ++--
 .../hadoop/hbase/io/hfile/FixedFileTrailer.java |  2 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.java |  4 +---
 .../hbase/io/hfile/HFilePrettyPrinter.java  | 10 
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java  |  3 +--
 .../hadoop/hbase/io/hfile/HFileWriterImpl.java  |  3 +--
 .../org/apache/hadoop/hbase/mob/MobUtils.java   |  5 ++--
 .../compactions/PartitionedMobCompactor.java|  3 ++-
 .../hbase/regionserver/DefaultMemStore.java |  3 +--
 .../hadoop/hbase/regionserver/HStore.java   |  3 +--
 .../hbase/regionserver/StoreFileReader.java |  5 ++--
 .../hbase/regionserver/StoreFileWriter.java |  6 ++---
 .../hbase/regionserver/wal/FSWALEntry.java  |  6 ++---
 .../hbase/util/CollectionBackedScanner.java |  5 ++--
 .../hadoop/hbase/util/CompressionTest.java  |  3 ++-
 .../hadoop/hbase/HBaseTestingUtility.java   |  2 +-
 .../hbase/HFilePerformanceEvaluation.java   |  2 +-
 .../apache/hadoop/hbase/client/TestResult.java  | 18 +++
 .../apache/hadoop/hbase/filter/TestFilter.java  | 13 ---
 .../hadoop/hbase/filter/TestFilterList.java | 24 
 .../hbase/regionserver/KeyValueScanFixture.java |  6 ++---
 .../hbase/regionserver/TestCellFlatSet.java | 10 
 .../regionserver/TestCompactingMemStore.java| 12 --
 .../regionserver/TestKeyValueScanFixture.java   |  4 ++--
 .../hbase/regionserver/TestStoreScanner.java| 21 -
 .../AbstractTestScanQueryMatcher.java   |  6 ++---
 .../hadoop/hbase/spark/HBaseContext.scala   |  2 +-
 44 files changed, 133 insertions(+), 151 deletions(-)
--
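With CellComparator now IA.Public, client code can order cells without
reaching into the IA.Private CellComparatorImpl. A minimal sketch:

import java.util.Arrays;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;

public class CellSorting {
  // getInstance() returns the default comparator for user-table cells,
  // the same one the ConnectionUtils change below switches to.
  static void sortCells(Cell[] cells) {
    Arrays.sort(cells, CellComparator.getInstance());
  }
}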


http://git-wip-us.apache.org/repos/asf/hbase/blob/9ee8e271/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
index 5e0e3b7..bc0ade2 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
@@ -39,7 +39,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparatorImpl;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -336,7 +336,7 @@ public final class ConnectionUtils {
 }
 Cell[] rawCells = result.rawCells();
 int index =
-Arrays.binarySearch(rawCells, keepCellsAfter, 
CellComparatorImpl.COMPARATOR::compareWithoutRow);
+Arrays.binarySearch(rawCells, keepCellsAfter, 
CellComparator.getInstance()::compareWithoutRow);
 if (index < 0) {
   index = -index - 1;
 } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/9ee8e271/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java

hbase git commit: HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a heads-up that it is removed in hbase2

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 273588336 -> 9f0a450e5


HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a
heads-up that it is removed in hbase2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9f0a450e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9f0a450e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9f0a450e

Branch: refs/heads/branch-1.2
Commit: 9f0a450e570b90ad588ada4e988f30d16c5f74b3
Parents: 2735883
Author: Michael Stack 
Authored: Sat Nov 4 10:37:10 2017 -0700
Committer: Michael Stack 
Committed: Tue Nov 7 11:31:02 2017 -0800

--
 hbase-prefix-tree/pom.xml| 2 +-
 .../hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java   | 3 +++
 src/main/asciidoc/_chapters/compression.adoc | 8 +---
 3 files changed, 9 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9f0a450e/hbase-prefix-tree/pom.xml
--
diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml
index 6dcdc66..6b6a043 100644
--- a/hbase-prefix-tree/pom.xml
+++ b/hbase-prefix-tree/pom.xml
@@ -29,7 +29,7 @@
 
   hbase-prefix-tree
   Apache HBase - Prefix Tree
-  Prefix Tree Data Block Encoder
+  Prefix Tree Data Block Encoder; DEPRECATED! Removed in 
hbase-2.0.0
 
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9f0a450e/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
--
diff --git 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
index a73232f..857c237 100644
--- 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
@@ -57,7 +57,10 @@ import org.apache.hadoop.io.WritableUtils;
  * {@link PrefixTreeEncoder}, and decoding is delegated to instances of
  * {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher}. 
Encoder and decoder instances are
  * created and recycled by static PtEncoderFactory and PtDecoderFactory.
+ * @deprecated Since 1.2.7. Will be removed in 2.0.0. Write to the dev list if
you would like to take on the
+ * maintenance of this facility.
  */
+@Deprecated
 @InterfaceAudience.Private
 public class PrefixTreeCodec implements DataBlockEncoder{
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9f0a450e/src/main/asciidoc/_chapters/compression.adoc
--
diff --git a/src/main/asciidoc/_chapters/compression.adoc 
b/src/main/asciidoc/_chapters/compression.adoc
index e5b9b8f..03d7d2e 100644
--- a/src/main/asciidoc/_chapters/compression.adoc
+++ b/src/main/asciidoc/_chapters/compression.adoc
@@ -115,14 +115,16 @@ The data format is nearly identical to Diff encoding, so 
there is not an image t
 Prefix Tree::
   Prefix tree encoding was introduced as an experimental feature in HBase 0.96.
   It provides similar memory savings to the Prefix, Diff, and Fast Diff 
encoder, but provides faster random access at a cost of slower encoding speed.
-+
+
 Prefix Tree may be appropriate for applications that have high block cache hit 
ratios. It introduces new 'tree' fields for the row and column.
 The row tree field contains a list of offsets/references corresponding to the 
cells in that row. This allows for a good deal of compression.
 For more details about Prefix Tree encoding, see 
link:https://issues.apache.org/jira/browse/HBASE-4676[HBASE-4676].
-+
+
 It is difficult to graphically illustrate a prefix tree, so no image is 
included. See the Wikipedia article for 
link:http://en.wikipedia.org/wiki/Trie[Trie] for more general information about 
this data structure.
+
+Prefix Tree will be removed in hbase-2.0.0. It is a sweet feature but has seen 
little uptake and is not actively maintained.
+Come and write to the dev list if you are interested in carrying on this
encoding.
 
-[[data.block.encoding.types]]
 === Which Compressor or Data Block Encoder To Use
 
 The compression or codec type to use depends on the characteristics of your 
data. Choosing the wrong type could cause your data to take more space rather 
than less, and can have performance implications.
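For users still on PREFIX_TREE, migrating a column family to another
encoder before the 2.0.0 upgrade looks roughly like this on branch-1;
the table and family names are illustrative, and FAST_DIFF is just one
reasonable replacement:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class PrefixTreeMigration {
  static HTableDescriptor withFastDiff() {
    HColumnDescriptor cf = new HColumnDescriptor("cf");
    // FAST_DIFF offers similar savings and remains supported in 2.0.0.
    cf.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF); // was PREFIX_TREE
    HTableDescriptor desc =
        new HTableDescriptor(TableName.valueOf("example_table"));
    desc.addFamily(cf);
    return desc;
  }
}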



hbase git commit: HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a heads-up that it is removed in hbase2

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 342328bdd -> 65b23613b


HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a
heads-up that it is removed in hbase2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/65b23613
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/65b23613
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/65b23613

Branch: refs/heads/branch-1.3
Commit: 65b23613b4da801a8c2085a6aa7b02c4f22736fe
Parents: 342328b
Author: Michael Stack 
Authored: Sat Nov 4 10:37:10 2017 -0700
Committer: Michael Stack 
Committed: Tue Nov 7 11:30:17 2017 -0800

--
 hbase-prefix-tree/pom.xml| 2 +-
 .../hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java   | 3 +++
 src/main/asciidoc/_chapters/compression.adoc | 8 +---
 3 files changed, 9 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/65b23613/hbase-prefix-tree/pom.xml
--
diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml
index c6e7aa4..8a6eaf0 100644
--- a/hbase-prefix-tree/pom.xml
+++ b/hbase-prefix-tree/pom.xml
@@ -29,7 +29,7 @@
 
   hbase-prefix-tree
   Apache HBase - Prefix Tree
-  Prefix Tree Data Block Encoder
+  Prefix Tree Data Block Encoder; DEPRECATED! Removed in 
hbase-2.0.0
 
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/65b23613/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
--
diff --git 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
index a73232f..857c237 100644
--- 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
@@ -57,7 +57,10 @@ import org.apache.hadoop.io.WritableUtils;
  * {@link PrefixTreeEncoder}, and decoding is delegated to instances of
  * {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher}. 
Encoder and decoder instances are
  * created and recycled by static PtEncoderFactory and PtDecoderFactory.
+ * @deprecated Since 1.2.7. Will be removed in 2.0.0. Write to the dev list if
you would like to take on the
+ * maintenance of this facility.
  */
+@Deprecated
 @InterfaceAudience.Private
 public class PrefixTreeCodec implements DataBlockEncoder{
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/65b23613/src/main/asciidoc/_chapters/compression.adoc
--
diff --git a/src/main/asciidoc/_chapters/compression.adoc 
b/src/main/asciidoc/_chapters/compression.adoc
index e5b9b8f..03d7d2e 100644
--- a/src/main/asciidoc/_chapters/compression.adoc
+++ b/src/main/asciidoc/_chapters/compression.adoc
@@ -115,14 +115,16 @@ The data format is nearly identical to Diff encoding, so 
there is not an image t
 Prefix Tree::
   Prefix tree encoding was introduced as an experimental feature in HBase 0.96.
   It provides similar memory savings to the Prefix, Diff, and Fast Diff 
encoder, but provides faster random access at a cost of slower encoding speed.
-+
+
 Prefix Tree may be appropriate for applications that have high block cache hit 
ratios. It introduces new 'tree' fields for the row and column.
 The row tree field contains a list of offsets/references corresponding to the 
cells in that row. This allows for a good deal of compression.
 For more details about Prefix Tree encoding, see 
link:https://issues.apache.org/jira/browse/HBASE-4676[HBASE-4676].
-+
+
 It is difficult to graphically illustrate a prefix tree, so no image is 
included. See the Wikipedia article for 
link:http://en.wikipedia.org/wiki/Trie[Trie] for more general information about 
this data structure.
+
+Prefix Tree will be removed in hbase-2.0.0. It is a sweet feature but has seen 
little uptake and is not actively maintained.
+Come and write to the dev list if you are interested in carrying on this
encoding.
 
-[[data.block.encoding.types]]
 === Which Compressor or Data Block Encoder To Use
 
 The compression or codec type to use depends on the characteristics of your 
data. Choosing the wrong type could cause your data to take more space rather 
than less, and can have performance implications.



hbase git commit: HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a heads-up that it is removed in hbase2

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 d4e973d2d -> 546a2368e


HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a
heads-up that it is removed in hbase2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/546a2368
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/546a2368
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/546a2368

Branch: refs/heads/branch-1.4
Commit: 546a2368e7b4053548f964a0d248914d0ff85ba2
Parents: d4e973d
Author: Michael Stack 
Authored: Sat Nov 4 10:37:10 2017 -0700
Committer: Michael Stack 
Committed: Tue Nov 7 11:14:08 2017 -0800

--
 hbase-prefix-tree/pom.xml | 2 +-
 .../apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java | 3 +++
 src/main/asciidoc/_chapters/compression.adoc  | 7 +--
 3 files changed, 9 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/546a2368/hbase-prefix-tree/pom.xml
--
diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml
index 05b50f3..e8a7daa 100644
--- a/hbase-prefix-tree/pom.xml
+++ b/hbase-prefix-tree/pom.xml
@@ -29,7 +29,7 @@
 
   hbase-prefix-tree
   Apache HBase - Prefix Tree
-  Prefix Tree Data Block Encoder
+  Prefix Tree Data Block Encoder; DEPRECATED! Removed in 
hbase-2.0.0
 
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/546a2368/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
--
diff --git 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
index a73232f..857c237 100644
--- 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
@@ -57,7 +57,10 @@ import org.apache.hadoop.io.WritableUtils;
  * {@link PrefixTreeEncoder}, and decoding is delegated to instances of
  * {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher}. 
Encoder and decoder instances are
  * created and recycled by static PtEncoderFactory and PtDecoderFactory.
+ * @deprecated Since 1.2.7. Will be removed in 2.0.0. Write to the dev list if
you would like to take on the
+ * maintenance of this facility.
  */
+@Deprecated
 @InterfaceAudience.Private
 public class PrefixTreeCodec implements DataBlockEncoder{
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/546a2368/src/main/asciidoc/_chapters/compression.adoc
--
diff --git a/src/main/asciidoc/_chapters/compression.adoc 
b/src/main/asciidoc/_chapters/compression.adoc
index 42d4de5..78fc6a2 100644
--- a/src/main/asciidoc/_chapters/compression.adoc
+++ b/src/main/asciidoc/_chapters/compression.adoc
@@ -115,13 +115,16 @@ The data format is nearly identical to Diff encoding, so 
there is not an image t
 Prefix Tree::
   Prefix tree encoding was introduced as an experimental feature in HBase 0.96.
   It provides similar memory savings to the Prefix, Diff, and Fast Diff 
encoder, but provides faster random access at a cost of slower encoding speed.
-+
+
 Prefix Tree may be appropriate for applications that have high block cache hit 
ratios. It introduces new 'tree' fields for the row and column.
 The row tree field contains a list of offsets/references corresponding to the 
cells in that row. This allows for a good deal of compression.
 For more details about Prefix Tree encoding, see 
link:https://issues.apache.org/jira/browse/HBASE-4676[HBASE-4676].
-+
+
 It is difficult to graphically illustrate a prefix tree, so no image is 
included. See the Wikipedia article for 
link:http://en.wikipedia.org/wiki/Trie[Trie] for more general information about 
this data structure.
 
+Prefix Tree will be removed in hbase-2.0.0. It is a sweet feature but has seen 
little uptake and is not actively maintained.
+Come and write to the dev list if you are interested in carrying on this
encoding.
+
 === Which Compressor or Data Block Encoder To Use
 
 The compression or codec type to use depends on the characteristics of your 
data. Choosing the wrong type could cause your data to take more space rather 
than less, and can have performance implications.



hbase git commit: HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a heads-up that it is removed in hbase2

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 ca68d7786 -> 045c02d17


HBASE-19182 Add deprecation in branch-1 for hbase-prefix-tree to give a
heads-up that it is removed in hbase2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/045c02d1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/045c02d1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/045c02d1

Branch: refs/heads/branch-1
Commit: 045c02d178d86250a9f36ae4e705d6f75a065d3f
Parents: ca68d77
Author: Michael Stack 
Authored: Sat Nov 4 10:37:10 2017 -0700
Committer: Michael Stack 
Committed: Tue Nov 7 11:13:37 2017 -0800

--
 hbase-prefix-tree/pom.xml | 2 +-
 .../apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java | 3 +++
 src/main/asciidoc/_chapters/compression.adoc  | 7 +--
 3 files changed, 9 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/045c02d1/hbase-prefix-tree/pom.xml
--
diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml
index 9a5649b..5839079 100644
--- a/hbase-prefix-tree/pom.xml
+++ b/hbase-prefix-tree/pom.xml
@@ -29,7 +29,7 @@
 
   hbase-prefix-tree
   Apache HBase - Prefix Tree
-  Prefix Tree Data Block Encoder
+  Prefix Tree Data Block Encoder; DEPRECATED! Removed in 
hbase-2.0.0
 
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/045c02d1/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
--
diff --git 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
index a73232f..857c237 100644
--- 
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ 
b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
@@ -57,7 +57,10 @@ import org.apache.hadoop.io.WritableUtils;
  * {@link PrefixTreeEncoder}, and decoding is delegated to instances of
  * {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher}. 
Encoder and decoder instances are
  * created and recycled by static PtEncoderFactory and PtDecoderFactory.
+ * @deprecated Since 1.2.7. Will be removed in 2.0.0. Write to the dev list if
you would like to take on the
+ * maintenance of this facility.
  */
+@Deprecated
 @InterfaceAudience.Private
 public class PrefixTreeCodec implements DataBlockEncoder{
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/045c02d1/src/main/asciidoc/_chapters/compression.adoc
--
diff --git a/src/main/asciidoc/_chapters/compression.adoc 
b/src/main/asciidoc/_chapters/compression.adoc
index 42d4de5..78fc6a2 100644
--- a/src/main/asciidoc/_chapters/compression.adoc
+++ b/src/main/asciidoc/_chapters/compression.adoc
@@ -115,13 +115,16 @@ The data format is nearly identical to Diff encoding, so 
there is not an image t
 Prefix Tree::
   Prefix tree encoding was introduced as an experimental feature in HBase 0.96.
   It provides similar memory savings to the Prefix, Diff, and Fast Diff 
encoder, but provides faster random access at a cost of slower encoding speed.
-+
+
 Prefix Tree may be appropriate for applications that have high block cache hit 
ratios. It introduces new 'tree' fields for the row and column.
 The row tree field contains a list of offsets/references corresponding to the 
cells in that row. This allows for a good deal of compression.
 For more details about Prefix Tree encoding, see 
link:https://issues.apache.org/jira/browse/HBASE-4676[HBASE-4676].
-+
+
 It is difficult to graphically illustrate a prefix tree, so no image is 
included. See the Wikipedia article for 
link:http://en.wikipedia.org/wiki/Trie[Trie] for more general information about 
this data structure.
 
+Prefix Tree will be removed in hbase-2.0.0. It is a sweet feature but has seen 
little uptake and is not actively maintained.
+Come and write to the dev list if you are interested in carrying on this
encoding.
+
 === Which Compressor or Data Block Encoder To Use
 
 The compression or codec type to use depends on the characteristics of your 
data. Choosing the wrong type could cause your data to take more space rather 
than less, and can have performance implications.
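
As an aside for readers configuring this: a minimal sketch, assuming the 
branch-1 admin API, of how a data block encoder and a compressor are set per 
column family. Table, family, and codec choices here are illustrative; pick 
the codec per the guidance above (FAST_DIFF rather than the deprecated 
PREFIX_TREE).

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class EncodingSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      HColumnDescriptor family = new HColumnDescriptor("f");
      // Encoder and compressor are independent, per-family settings.
      family.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
      family.setCompressionType(Compression.Algorithm.SNAPPY);
      HTableDescriptor table = new HTableDescriptor(TableName.valueOf("example"));
      table.addFamily(family);
      admin.createTable(table);
    }
  }
}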



hbase git commit: HBASE-18961 doMiniBatchMutate() is split into smaller member methods of BatchOperation and its sub-classes

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 1110910b3 -> fa3cc6c3b


HBASE-18961 doMiniBatchMutate() is split into smaller member methods of 
BatchOperation and its sub-classes

There is no functionality change except for the following:
* Variable lastIndexExclusive was getting incremented while locking rows 
  corresponding to input operations. As a result, when getRowLockInternal() 
  threw TimeoutIOException, only operations in the range [nextIndexToProcess, 
  lastIndexExclusive) were marked as FAILED before the exception was raised 
  up the call stack. With these changes, all operations are marked as FAILED 
  (see the sketch after this list).
* The cluster ids of the first mutation are now used consistently for the 
  entire batch. The previous behavior was to use the cluster ids of the first 
  mutation in each mini-batch.
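
A minimal sketch of the failure-marking semantics described above, assuming 
HBase's OperationStatus and OperationStatusCode types; the class, the 
acquireRowLock() helper and the method name are illustrative stand-ins for 
the BatchOperation internals, not the actual patch:

import java.io.IOException;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.regionserver.OperationStatus;

abstract class BatchFailureSketch {
  // Hypothetical stand-in for getRowLockInternal(); may throw TimeoutIOException.
  abstract void acquireRowLock(byte[] row) throws IOException;

  void lockRowsOrFailAll(Mutation[] operations, OperationStatus[] statuses)
      throws IOException {
    try {
      for (Mutation m : operations) {
        acquireRowLock(m.getRow());
      }
    } catch (IOException e) {
      // After HBASE-18961: every operation in the batch is marked FAILED,
      // not only those in [nextIndexToProcess, lastIndexExclusive).
      for (int i = 0; i < statuses.length; i++) {
        statuses[i] = new OperationStatus(OperationStatusCode.FAILURE, e.getMessage());
      }
      throw e;
    }
  }
}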

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fa3cc6c3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fa3cc6c3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fa3cc6c3

Branch: refs/heads/branch-2
Commit: fa3cc6c3bc4860d633fdb8152f094de40fb558e6
Parents: 1110910
Author: Umesh Agashe 
Authored: Sun Oct 8 00:31:12 2017 -0700
Committer: Michael Stack 
Committed: Tue Nov 7 10:01:28 2017 -0800

--
 .../org/apache/hadoop/hbase/util/NonceKey.java  |4 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 1328 ++
 .../MiniBatchOperationInProgress.java   |   44 +-
 .../regionserver/MultiRowMutationProcessor.java |2 +-
 .../TestMiniBatchOperationInProgress.java   |4 +-
 .../access/TestWithDisabledAuthorization.java   |2 +-
 6 files changed, 805 insertions(+), 579 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fa3cc6c3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
index 6da808e..b658331 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
@@ -1,4 +1,4 @@
-/**
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.yetus.audience.InterfaceAudience;
 
  /**
@@ -31,7 +30,6 @@ public class NonceKey {
   private long nonce;
 
   public NonceKey(long group, long nonce) {
-assert nonce != HConstants.NO_NONCE;
 this.group = group;
 this.nonce = nonce;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/fa3cc6c3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 5cd27b8..82d4bd2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -161,6 +161,7 @@ import org.apache.hadoop.hbase.util.EncryptionTest;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HashedBytes;
+import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
 import org.apache.hadoop.hbase.util.Threads;
@@ -199,6 +200,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
 
+import edu.umd.cs.findbugs.annotations.Nullable;
+
 /**
  * Regions store data for a certain region of a table.  It stores all columns
  * for each row. A given table consists of one or more Regions.
@@ -642,7 +645,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   // flushPerChanges is to prevent too many changes in memstore
   private long flushPerChanges;
   private long blockingMemStoreSize;
-  final long threadWakeFrequency;
   // Used to guard closes
   final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
 
@@ -757,7 +759,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   }
 }
 this.rsServices = rsServices;
-this.threadWakeFrequency = con

hbase git commit: HBASE-18961 doMiniBatchMutate() is split into smaller member methods of BatchOperation and its sub-classes

2017-11-07 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 29fd1dead -> 4eae5a297


HBASE-18961 doMiniBatchMutate() is split into smaller member methods of 
BatchOperation and its sub-classes

There is no functionality change except for the following:
* Variable lastIndexExclusive was getting incremented while locking rows 
  corresponding to input operations. As a result, when getRowLockInternal() 
  threw TimeoutIOException, only operations in the range [nextIndexToProcess, 
  lastIndexExclusive) were marked as FAILED before the exception was raised 
  up the call stack. With these changes, all operations are marked as FAILED.
* The cluster ids of the first mutation are now used consistently for the 
  entire batch. The previous behavior was to use the cluster ids of the first 
  mutation in each mini-batch.

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4eae5a29
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4eae5a29
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4eae5a29

Branch: refs/heads/master
Commit: 4eae5a29749da1c34f1a2dd0b1f6aa6f7a9bbffd
Parents: 29fd1de
Author: Umesh Agashe 
Authored: Sun Oct 8 00:31:12 2017 -0700
Committer: Michael Stack 
Committed: Tue Nov 7 10:00:49 2017 -0800

--
 .../org/apache/hadoop/hbase/util/NonceKey.java  |4 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 1328 ++
 .../MiniBatchOperationInProgress.java   |   44 +-
 .../regionserver/MultiRowMutationProcessor.java |2 +-
 .../TestMiniBatchOperationInProgress.java   |4 +-
 .../access/TestWithDisabledAuthorization.java   |2 +-
 6 files changed, 805 insertions(+), 579 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4eae5a29/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
index 6da808e..b658331 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/NonceKey.java
@@ -1,4 +1,4 @@
-/**
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.yetus.audience.InterfaceAudience;
 
  /**
@@ -31,7 +30,6 @@ public class NonceKey {
   private long nonce;
 
   public NonceKey(long group, long nonce) {
-assert nonce != HConstants.NO_NONCE;
 this.group = group;
 this.nonce = nonce;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4eae5a29/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 5cd27b8..82d4bd2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -161,6 +161,7 @@ import org.apache.hadoop.hbase.util.EncryptionTest;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HashedBytes;
+import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
 import org.apache.hadoop.hbase.util.Threads;
@@ -199,6 +200,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
 
+import edu.umd.cs.findbugs.annotations.Nullable;
+
 /**
  * Regions store data for a certain region of a table.  It stores all columns
  * for each row. A given table consists of one or more Regions.
@@ -642,7 +645,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   // flushPerChanges is to prevent too many changes in memstore
   private long flushPerChanges;
   private long blockingMemStoreSize;
-  final long threadWakeFrequency;
   // Used to guard closes
   final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
 
@@ -757,7 +759,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   }
 }
 this.rsServices = rsServices;
-this.threadWakeFrequency = conf.ge

[22/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.Importer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.Importer.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.Importer.html
index 70a906c..61dc8ca 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.Importer.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.Importer.html
@@ -123,7 +123,7 @@
[Regenerated javadoc for Import.Importer ("Write table content out to files
in hdfs", a public static class extending TableMapper): no semantic change.
Only the HTML line-number anchors behind the class declaration, the fields
cfRenameMap, clusterIds, filter and durability, the constructor Importer(),
and the methods map(), writeResult(), processKV(), addPutToKv() and setup()
were renumbered. The surrounding javadoc HTML is elided here.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.html
index 284571e..ad49669 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/Import.html
@@ -521,7 +521,7 @@
[Likewise for the Import class page: instantiateFilter(
org.apache.hadoop.conf.Configuration) ("Create a Filter to apply to all
incoming keys (KeyValues) to optionally not include in the job output") and
the private static toQuotedByteArrays(String... stringArgs) are unchanged;
only their HTML anchors moved.]

[16/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
index b492d9f..435b2b3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
@@ -901,599 +901,608 @@
[The regenerated CellUtil source page is heavily mangled in this archive.
What is recoverable: the hunk re-renders the section of CellUtil.java in
which isDelete(Cell) delegates to PrivateCellUtil.isDelete(
cell.getTypeByte()), and in which isDelete(byte), isDeleteType(Cell),
isDeleteFamily(Cell), isDeleteFamilyVersion(Cell), isDeleteColumns(Cell),
isDeleteColumnVersion(Cell), isDeleteColumnOrFamily(Cell),
estimatedSerializedSizeOf(Cell), estimatedSerializedSizeOfKey(Cell) and
estimatedHeapSizeOf(Cell) all carry the @Deprecated annotation with
"@deprecated As of release 2.0.0, this will be removed in HBase 3.0.0",
the estimation and size methods delegating to their PrivateCellUtil
counterparts. The new rendering (nine lines longer per the hunk header) is
truncated away; the message ends mid-hunk.]

[09/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
index aa5ad0d..30d80c0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
@@ -32,619 +32,617 @@
[The regenerated FuzzyRowFilter source page is mangled in this archive.
Recoverable content: the imports drop CellComparatorImpl and CellUtil in
favor of CellComparator (PrivateCellUtil and the audience annotations are
reordered). The class javadoc still reads: FuzzyRowFilter is an optimized
version of a standard fuzzy row filter that performs fast-forwards during
scanning; it takes pairs (row key, fuzzy info), where fuzzy info is a byte
array of 0s and 1s: 0 means the byte at that position in the row key is
fixed and must match, 1 means it may differ. For a row key format
userId_actionId_year_month (4, 2, 4 and 2 bytes wide), all users that
performed action "99" in January of any year are matched by the pair
row key = "????_99_????_01" (any value can stand in for "?") and
fuzzy info = "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00".
The constructor copies each pair, preprocesses the mask (0 -> 0xff, 1 -> 2)
and the search key, and initializes the RowTracker. The message is truncated
mid-hunk.]
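
A hedged usage sketch of the public FuzzyRowFilter constructor for the key
layout in that javadoc; table and scan setup are elided, only the filter
wiring is shown:

import java.util.Arrays;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class FuzzyScanSketch {
  public static Scan actionInJanuary() {
    // Row key layout: userId(4) _ actionId(2) _ year(4) _ month(2).
    byte[] rowKey = Bytes.toBytes("????_99_????_01");
    byte[] fuzzyInfo = new byte[] {
        1, 1, 1, 1,   // userId: any value
        0, 0, 0, 0,   // "_99_": fixed
        1, 1, 1, 1,   // year: any value
        0, 0, 0       // "_01": fixed
    };
    Scan scan = new Scan();
    scan.setFilter(new FuzzyRowFilter(Arrays.asList(new Pair<>(rowKey, fuzzyInfo))));
    return scan;
  }
}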

[38/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
index ab49b7b..ebc8e8a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
@@ -48,7 +48,7 @@
 040import org.apache.hadoop.fs.FileSystem;
 041import org.apache.hadoop.fs.Path;
 042import org.apache.hadoop.hbase.Cell;
-043import org.apache.hadoop.hbase.CellComparatorImpl;
+043import org.apache.hadoop.hbase.CellComparator;
 044import org.apache.hadoop.hbase.CellUtil;
 045import org.apache.hadoop.hbase.HBaseConfiguration;
 046import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -153,641 +153,639 @@
[The rest of the hunk is mangled in this archive. Recoverable content:
CellWritableComparable.compareTo() drops its findbugs
@SuppressWarnings("EQ_COMPARETO_USE_OBJECT_EQUALS") annotation and now reads
"return CellComparator.getInstance().compare(this.kv, o.kv);" instead of
"return CellComparatorImpl.COMPARATOR.compare(this.kv,
((CellWritableComparable)o).kv);". The nested CellWritableComparator,
CellReducer and CellSortImporter classes are only re-indented and
renumbered. The message is truncated mid-hunk.]

[11/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.RowTracker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.RowTracker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.RowTracker.html
index aa5ad0d..30d80c0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.RowTracker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.RowTracker.html
@@ -32,619 +32,617 @@
[This is the same regenerated FuzzyRowFilter source shown in part [09/51]
above, re-rendered for the FuzzyRowFilter.RowTracker page: the imports swap
CellComparatorImpl and CellUtil for CellComparator, and the source lines are
renumbered. The message is truncated mid-hunk.]

[45/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/CellComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
new file mode 100644
index 000..5685dfa
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
@@ -0,0 +1,197 @@
[New source page for org.apache.hadoop.hbase.CellComparator. The HTML
scaffolding is elided, and the rendered source, reconstructed from the
mangled text up to where the message truncates, reads:]

/*  (standard ASF license header)  */
package org.apache.hadoop.hbase;

import java.util.Comparator;

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
/**
 * Comparator for comparing cells and has some specialized methods that
 * allows comparing individual cell components like row, family, qualifier
 * and timestamp
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface CellComparator extends Comparator<Cell> {
  /**
   * A comparator for ordering cells in user-space tables. Useful when
   * writing cells in sorted order as necessary for bulk import (i.e. via
   * MapReduce).
   * CAUTION: This comparator may provide inaccurate ordering for cells
   * from system tables, and should not be relied upon in that case.
   */
  static CellComparator getInstance() {
    return CellComparatorImpl.COMPARATOR;
  }

  /**
   * Lexographically compares two cells. The key part of the cell is taken
   * for comparison, which includes row, family, qualifier, timestamp and
   * type.
   * @return greater than 0 if leftCell is bigger, less than 0 if rightCell
   *         is bigger, 0 if both cells are equal
   */
  @Override
  int compare(Cell leftCell, Cell rightCell);

  /** Lexographically compares the rows of two cells. */
  int compareRows(Cell leftCell, Cell rightCell);

  /**
   * Compares the row part of the cell with a simple plain byte[] like the
   * stopRow in Scan.
   */
  int compareRows(Cell cell, byte[] bytes, int offset, int length);

  /**
   * Lexographically compares the two cells excluding the row part: it
   * compares family, qualifier, timestamp and the type.
   */
  int compareWithoutRow(Cell leftCell, Cell rightCell);

  /** Lexographically compares the families of the two cells. */
  int compareFamilies(Cell leftCell, Cell rightCell);

  /** Lexographically compares the qualifiers of the two cells. */
  int compareQualifiers(C
[The message is truncated here in the archive.]

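A small sketch of the new public entry point, sorting KeyValues through
CellComparator.getInstance() rather than the internal
CellComparatorImpl.COMPARATOR; row and value bytes are illustrative:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CellSortSketch {
  public static void main(String[] args) {
    List<Cell> cells = new ArrayList<>();
    cells.add(new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v2")));
    cells.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v1")));
    // Orders by row, family, qualifier, timestamp and type, as documented above.
    cells.sort(CellComparator.getInstance());
    System.out.println(Bytes.toString(CellUtil.cloneRow(cells.get(0)))); // row1
  }
}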

[47/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Result.html 
b/apidocs/org/apache/hadoop/hbase/client/Result.html
index 245071f..2021bc7 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Result.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Result.html
@@ -114,7 +114,7 @@
[Regenerated javadoc for org.apache.hadoop.hbase.client.Result
(@InterfaceAudience.Public, implements CellScannable and CellScanner,
"Single row result of a Get or Scan query"): no semantic change. Only the
HTML anchors of EMPTY_RESULT, the Result() constructor ("Creates an empty
Result w/ no KeyValue payload; returns null if you call rawCells()") and
the static create(...) overloads (over List<Cell> or Cell[], with optional
exists, stale and mayHaveMoreCellsInRow arguments) were renumbered. The
message is truncated mid-page.]

[46/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/org/apache/hadoop/hbase/filter/class-use/BigDecimalComparator.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/BigDecimalComparator.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/BigDecimalComparator.html
new file mode 100644
index 000..338f0ea
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/BigDecimalComparator.html
@@ -0,0 +1,168 @@
[New class-use page: "Uses of Class
org.apache.hadoop.hbase.filter.BigDecimalComparator (Apache HBase
3.0.0-SNAPSHOT API)". With the HTML navigation scaffolding stripped, the
page records one using package, org.apache.hadoop.hbase.filter ("Provides
row-level filters applied to HRegion scan results during calls to
ResultScanner.next()"), and one method that returns the type:
static BigDecimalComparator BigDecimalComparator.parseFrom(byte[] pbBytes).]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
index 88f0501..c47ff1c 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
@@ -107,39 +107,45 @@
[This hunk inserts BigDecimalComparator ("A BigDecimal comparator which
numerically compares against the specified byte array") into the list of
ByteArrayComparable subclasses, ahead of BinaryComparator,
BinaryPrefixComparator, BitComparator, LongComparator, NullComparator,
RegexStringComparator and SubstringComparator, shifting the alternating
row-color anchors of the existing entries by one.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/org/apache/hadoop/hbase/filter/package-frame.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/package-frame.html 
b/apidocs/org/apache/hadoop/hbase/filter/package-frame.html
index 32cac95..b62dd91 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/package-frame.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/package-frame.html
@@ -13,6 +13,7 @@
 
 Classes
 
+BigDecimalComparator
 BinaryComparator
 BinaryPrefixComparator
 BitComparator

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/org/apache/hadoop/hbase/filter/package-summary.html
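
[The message truncates at the package-summary diff above.] For context, a
hedged sketch of how the new comparator is typically wired into a scan: it
assumes values were written with Bytes.toBytes(BigDecimal), and the table,
family and qualifier names are illustrative:

import java.math.BigDecimal;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BigDecimalComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class BigDecimalFilterSketch {
  public static Scan priceAbove(BigDecimal threshold) {
    // Keep only rows whose f:price decodes to a BigDecimal > threshold.
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("f"), Bytes.toBytes("price"),
        CompareOperator.GREATER, new BigDecimalComparator(threshold));
    filter.setFilterIfMissing(true);
    Scan scan = new Scan();
    scan.setFilter(filter);
    return scan;
  }
}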

[34/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 522ec10..0cd348c 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -3321,6 +3321,8 @@
[The regenerated developer-API index is mangled in this archive. The
recoverable changes, in index order:
* New entries for org.apache.hadoop.hbase.filter.BigDecimalComparator ("A
  BigDecimal comparator which numerically compares against the specified
  byte array"): the BigDecimalComparator(BigDecimal) constructor, the
  bigDecimal field, areSerializedFieldsEqual(BigDecimalComparator),
  compareTo(byte[], int, int), compareTo(ByteBuffer, int, int),
  equals(Object), hashCode() and parseFrom(byte[]).
* The deprecation note on KeyValue.COMPARATOR now reads "Use
  CellComparator.getInstance() instead" rather than "Use
  CellComparatorImpl.COMPARATOR instead" (deprecated for hbase 2.0, remove
  for hbase 3.0).
* A new getInstance() entry for the CellComparator interface ("A comparator
  for ordering cells in user-space tables").
* Cell.getTypeByte() is now flagged: "Deprecated. since 2.0.0, use
  appropriate CellUtil.isDelete(org.apache.hadoop.hbase.Cell) or
  CellUtil.isPut(Cell) methods instead. This will be removed in 3.0.0."
* A new isPut(Cell) entry for org.apache.hadoop.hbase.CellUtil.
The message is truncated at the parseFrom entries.]
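
Given the getTypeByte() deprecation recorded above, a minimal sketch of the
replacement calls; the classify() helper is illustrative:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;

public class CellTypeSketch {
  // Classify a cell without touching the deprecated Cell.getTypeByte().
  static String classify(Cell cell) {
    if (CellUtil.isDelete(cell)) {
      return "delete";
    } else if (CellUtil.isPut(cell)) {
      return "put";
    }
    return "other";
  }
}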

[50/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 1dcf301..5bd6351 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -742,6 +742,12 @@
[The regenerated public-API index is likewise mangled. The recoverable
additions, in index order:
* BigDecimalComparator class and constructor entries, as in the developer
  index above.
* A CellComparator interface entry ("Comparator for comparing cells and has
  some specialized methods that allows comparing individual cell components
  like row, family, qualifier and timestamp") with method entries for
  compare(Cell, Cell) ("Lexographically compares two cells"),
  compareFamilies(Cell, Cell), compareQualifiers(Cell, Cell),
  compareRows(Cell, Cell), compareRows(Cell, byte[], int, int) ("Compares
  the row part of the cell with a simple plain byte[] like the stopRow in
  Scan"), compareTimestamps(Cell, Cell) and compareTimestamps(long, long)
  (both "Compares cell's timestamps in DESCENDING order"), and
  compareWithoutRow(Cell, Cell).
* compareTo(byte[], int, int), compareTo(ByteBuffer, int, int) and
  equals(Object) entries for BigDecimalComparator.
The message is truncated at a getInstance() entry for CellComparator.]

[32/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index c7b71d2..b7db0b0 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1724,25 +1724,29 @@ service.
 PrivateCellUtil.isDeleteType(Cell cell) 
 
 
+static boolean
+CellUtil.isPut(Cell cell) 
+
+
 static int
 KeyValueUtil.keyLength(Cell cell)
 Returns the number of bytes this cell's key part would occupy if serialized as in
  KeyValue.
 
 
 static int
 KeyValueUtil.length(Cell cell)
 Returns the number of bytes this cell would occupy if serialized as in KeyValue
 
 
 static boolean
 CellUtil.matchingColumn(Cell left,
   byte[] fam,
   byte[] qual) 
 
 static boolean
 CellUtil.matchingColumn(Cell left,
   byte[] fam,
@@ -1756,7 +1760,7 @@ service.
 
 
 
 static boolean
 PrivateCellUtil.matchingColumn(Cell left,
   byte[] fam,
@@ -1766,17 +1770,17 @@ service.
   int qoffset,
   int qlength) 
 
 static boolean
 CellUtil.matchingColumn(Cell left,
   Cell right) 
 
 static boolean
 CellUtil.matchingFamily(Cell left,
   byte[] buf) 
 
 static boolean
 CellUtil.matchingFamily(Cell left,
   byte[] buf,
@@ -1787,19 +1791,19 @@ service.
 
 
 
 static boolean
 PrivateCellUtil.matchingFamily(Cell left,
   byte[] buf,
   int offset,
   int length) 
 
 static boolean
 CellUtil.matchingFamily(Cell left,
   Cell right) 
 
 static boolean
 CellUtil.matchingQualifier(Cell left,
  byte[] buf)
@@ -1807,7 +1811,7 @@ service.
  byte[] are equal
 
 
 static boolean
 CellUtil.matchingQualifier(Cell left,
  byte[] buf,
@@ -1818,7 +1822,7 @@ service.
 
 
 
 static boolean
 PrivateCellUtil.matchingQualifier(Cell left,
  byte[] buf,
@@ -1827,12 +1831,12 @@ service.
 Finds if the qualifier part of the cell and the KV 
serialized byte[] are equal
 
 
 static boolean
 CellUtil.matchingQualifier(Cell left,
  Cell right) 
 
 static boolean
 CellUtil.matchingRow(Cell left,
byte[] buf)
@@ -1842,14 +1846,14 @@ service.
 
 
 
 static boolean
 CellUtil.matchingRow(Cell left,
byte[] buf,
int offset,
int length) 
 
 static boolean
 CellUtil.matchingRow(Cell left,
Cell right)
@@ -1859,14 +1863,14 @@ service.
 
 
 
 static boolean
 CellUtil.matchingRowColumn(Cell left,
  Cell right)
 Compares the row and column of two keyvalues for 
equality
 
 
 boolean
 KeyValue.KVComparator.matchingRowColumn(Cell left,
  Cell right)
@@ -1874,31 +1878,31 @@ service.
 Compares the row and column of two keyvalues for 
equality
 
 
 static boolean
 CellUtil.matchingRowColumnBytes(Cell left,
   Cell right) 
 
 static boolean
 CellUtil.matchingRows(Cell left,
 byte[] buf) 
 
 static boolean
 PrivateCellUtil.matchingRows(Cell left,
 byte[] buf,
 int offset,
 int length) 
 
 static boolean
 CellUtil.matchingRows(Cell left,
 Cell right)
 Compares the row of two keyvalues for equality
 
 
 boolean
 KeyValue.KVComparator.matchingRows(Cell left,
 Cell right)
@@ -1906,7 +1910,7 @@ service.
 Compares the row of two keyvalues for equality
 
 
 private boolean
 KeyValue.KVComparator.matchingRows(Cell left,
 short lrowlength,
@@ -1915,12 +1919,12 @@ service.
 Deprecated. 
  
 
 static boolean
 CellUtil.matchingTimestamp(Cell a,
  Cell b) 
 
 static boolean
 CellUtil.matchingType(Cell a,
 Cell b)
@@ -1929,42 +1933,42 @@ service.
 
 
 
 static boolean
 PrivateCellUtil.matchingType(Cell a,
 Cell b) 
 
 static boolean
 CellUtil.matchingValue(Cell left,
  byte[] buf) 
 
 static boolean
 CellUtil.matchingValue(Cell left,
  Cell right) 
 
 static boolean
 CellUtil.matchingValue(Cell left,
  Cell right,
  int lvlength,
  int rvlength) 
 
 static boolean
 PrivateCellUtil.matchingValue(Cell left,
  Cell right,
  int lvlength,
  int rvlength) 
 
 static int
 KeyValueUtil.oswrite(Cell cell,
http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in java.io">OutputStream out,
boolean withTags) 
 
 static void
 CellUtil.setSequenceId(Cell cell,
  long seqId)
@@ -1973,14 +1977,14 @@ service.
 
 
 
 static void
 PrivateCellUtil.setSeq
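The new CellUtil.isPut(Cell) added in this diff distinguishes genuine Put cells
from delete markers; a minimal sketch (the Result and the process() helper are
hypothetical):

    for (Cell cell : result.rawCells()) {
      if (CellUtil.isPut(cell)) {
        process(cell);  // only cells written by a Put, no Delete markers
      }
    }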

[35/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index b97262f..53f4b3c 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2017 The Apache Software Foundation
 
-  File: 3425,
- Errors: 21610,
+  File: 3427,
+ Errors: 21597,
  Warnings: 0,
  Infos: 0
   
@@ -1875,7 +1875,7 @@ under the License.
   0
 
 
-  16
+  15
 
   
   
@@ -5128,6 +5128,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.TestBigDecimalComparator.java";>org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mapred.TestGroupingTableMap.java";>org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
 
 
@@ -8679,7 +8693,7 @@ under the License.
   0
 
 
-  14
+  13
 
   
   
@@ -11927,7 +11941,7 @@ under the License.
   0
 
 
-  351
+  349
 
   
   
@@ -21531,7 +21545,7 @@ under the License.
   0
 
 
-  10
+  11
 
   
   
@@ -25395,7 +25409,7 @@ under the License.
   0
 
 
-  177
+  166
 
   
   
@@ -29413,7 +29427,7 @@ under the License.
   0
 
 
-  16
+  17
 
   
   
@@ -30323,7 +30337,7 @@ under the License.
   0
 
 
-  4
+  5
 
   
   
@@ -42895,7 +42909,7 @@ under the License.
   0
 
 
-  15
+  14
 
   
   
@@ -46624,6 +46638,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.BigDecimalComparator.java";>org/apache/hadoop/hbase/filter/BigDecimalComparator.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.metrics.MetricSet.java";>org/apache/hadoop/hbase/metrics/MetricSet.java
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/coc.html
--
diff --git a/coc.html b/coc.html
index 88f48f2..80648f5 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org";>the priv
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-11-06
+  Last Published: 
2017-11-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 13d36b8..38f9d71 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-11-06
+   

[24/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
index 768aed0..99ce39a 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class HFileReaderImpl
+public class HFileReaderImpl
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements HFile.Reader, 
org.apache.hadoop.conf.Configurable
 Implementation that can handle all hfile versions of HFile.Reader.
@@ -677,7 +677,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -686,7 +686,7 @@ implements 
 
 dataBlockIndexReader
-private HFileBlockIndex.CellBasedKeyBlockIndexReader
 dataBlockIndexReader
+private HFileBlockIndex.CellBasedKeyBlockIndexReader
 dataBlockIndexReader
 Data block index reader keeping the root data index in 
memory
 
 
@@ -696,7 +696,7 @@ implements 
 
 metaBlockIndexReader
-private HFileBlockIndex.ByteArrayKeyBlockIndexReader
 metaBlockIndexReader
+private HFileBlockIndex.ByteArrayKeyBlockIndexReader
 metaBlockIndexReader
 Meta block index reader -- always single level
 
 
@@ -706,7 +706,7 @@ implements 
 
 trailer
-private final FixedFileTrailer trailer
+private final FixedFileTrailer trailer
 
 
 
@@ -715,7 +715,7 @@ implements 
 
 compressAlgo
-private final Compression.Algorithm 
compressAlgo
+private final Compression.Algorithm 
compressAlgo
 Filled when we read in the trailer.
 
 
@@ -725,7 +725,7 @@ implements 
 
 primaryReplicaReader
-private final boolean primaryReplicaReader
+private final boolean primaryReplicaReader
 
 
 
@@ -734,7 +734,7 @@ implements 
 
 dataBlockEncoder
-private HFileDataBlockEncoder dataBlockEncoder
+private HFileDataBlockEncoder dataBlockEncoder
 What kind of data block encoding should be used while 
reading, writing,
  and handling cache.
 
@@ -745,7 +745,7 @@ implements 
 
 lastKeyCell
-private Cell lastKeyCell
+private Cell lastKeyCell
 Last key in the file. Filled in when we read in the file 
info
 
 
@@ -755,7 +755,7 @@ implements 
 
 avgKeyLen
-private int avgKeyLen
+private int avgKeyLen
 Average key length read from file info
 
 
@@ -765,7 +765,7 @@ implements 
 
 avgValueLen
-private int avgValueLen
+private int avgValueLen
 Average value length read from file info
 
 
@@ -775,7 +775,7 @@ implements 
 
 comparator
-private CellComparator comparator
+private CellComparator comparator
 Key comparator
 
 
@@ -785,7 +785,7 @@ implements 
 
 fileSize
-private final long fileSize
+private final long fileSize
 Size of this file.
 
 
@@ -795,7 +795,7 @@ implements 
 
 cacheConf
-private final CacheConfig cacheConf
+private final CacheConfig cacheConf
 Block cache configuration.
 
 
@@ -805,7 +805,7 @@ implements 
 
 path
-private final org.apache.hadoop.fs.Path path
+private final org.apache.hadoop.fs.Path path
 Path of file
 
 
@@ -815,7 +815,7 @@ implements 
 
 name
-private final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name
+private final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name
 File name to be used for block names
 
 
@@ -825,7 +825,7 @@ implements 
 
 fileInfo
-private HFile.FileInfo fileInfo
+private HFile.FileInfo fileInfo
 
 
 
@@ -834,7 +834,7 @@ implements 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -843,7 +843,7 @@ implements 
 
 hfileContext
-private HFileContext hfileContext
+private HFileContext hfileContext
 
 
 
@@ -852,7 +852,7 @@ implements 
 
 fsBlockReader
-private HFileBlock.FSReader 
fsBlockReader
+private HFileBlock.FSReader 
fsBlockReader
 Filesystem-level block reader.
 
 
@@ -862,7 +862,7 @@ implements 
 
 offsetLock
-private IdLock offsetLock
+private IdLock offsetLock
 A "sparse lock" implementation that allows locking on a particular block
  identified by offset. The purpose of this is to avoid two clients loading
  the same block, and have all but one client wait to get the block from the
@@ -875,7 +875,7 @@ implements 
 
 loadOnOpenBlocks
-private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List loadOnOpenBlocks
+private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List loadOnOpen
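The offsetLock field above is an org.apache.hadoop.hbase.util.IdLock; a minimal
sketch of the acquire/release pattern, assuming a hypothetical
loadBlockIfAbsent() body:

    IdLock offsetLock = new IdLock();
    IdLock.Entry entry = offsetLock.getLockEntry(blockOffset);
    try {
      // Only one thread per offset gets here; the others wait and then
      // find the block already cached instead of re-reading it.
      loadBlockIfAbsent(blockOffset);
    } finally {
      offsetLock.releaseLockEntry(entry);
    }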

[03/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
index 83e48f3..157463c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
@@ -33,1142 +33,1141 @@
 025import 
org.apache.hadoop.hbase.ByteBufferCell;
 026import org.apache.hadoop.hbase.Cell;
 027import 
org.apache.hadoop.hbase.CellComparator;
-028import 
org.apache.hadoop.hbase.CellComparatorImpl;
-029import 
org.apache.hadoop.hbase.CellUtil;
-030import 
org.apache.hadoop.hbase.ExtendedCell;
-031import 
org.apache.hadoop.hbase.HConstants;
-032import 
org.apache.hadoop.hbase.PrivateCellUtil;
-033import 
org.apache.hadoop.hbase.KeyValue;
-034import 
org.apache.hadoop.hbase.KeyValue.Type;
-035import 
org.apache.hadoop.hbase.KeyValueUtil;
-036import 
org.apache.yetus.audience.InterfaceAudience;
-037import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-038import 
org.apache.hadoop.hbase.io.util.LRUDictionary;
-039import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-040import 
org.apache.hadoop.hbase.nio.ByteBuff;
-041import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-042import 
org.apache.hadoop.hbase.util.Bytes;
-043import 
org.apache.hadoop.hbase.util.ClassSize;
-044import 
org.apache.hadoop.hbase.util.ObjectIntPair;
-045import 
org.apache.hadoop.io.WritableUtils;
-046
-047/**
-048 * Base class for all data block encoders 
that use a buffer.
-049 */
-050@InterfaceAudience.Private
-051abstract class BufferedDataBlockEncoder 
extends AbstractDataBlockEncoder {
-052  /**
-053   * TODO: This datablockencoder is 
dealing in internals of hfileblocks. Purge reference to HFBs
-054   */
-055  private static int 
INITIAL_KEY_BUFFER_SIZE = 512;
-056
-057  @Override
-058  public ByteBuffer 
decodeKeyValues(DataInputStream source,
-059  HFileBlockDecodingContext 
blkDecodingCtx) throws IOException {
-060if (blkDecodingCtx.getClass() != 
HFileBlockDefaultDecodingContext.class) {
-061  throw new 
IOException(this.getClass().getName() + " only accepts "
-062  + 
HFileBlockDefaultDecodingContext.class.getName() + " as the decoding 
context.");
-063}
-064
-065HFileBlockDefaultDecodingContext 
decodingCtx =
-066
(HFileBlockDefaultDecodingContext) blkDecodingCtx;
-067if 
(decodingCtx.getHFileContext().isIncludesTags()
-068&& 
decodingCtx.getHFileContext().isCompressTags()) {
-069  if 
(decodingCtx.getTagCompressionContext() != null) {
-070// It will be overhead to create 
the TagCompressionContext again and again for every block
-071// decoding.
-072
decodingCtx.getTagCompressionContext().clear();
-073  } else {
-074try {
-075  TagCompressionContext 
tagCompressionContext = new TagCompressionContext(
-076  LRUDictionary.class, 
Byte.MAX_VALUE);
-077  
decodingCtx.setTagCompressionContext(tagCompressionContext);
-078} catch (Exception e) {
-079  throw new IOException("Failed 
to initialize TagCompressionContext", e);
-080}
-081  }
-082}
-083return 
internalDecodeKeyValues(source, 0, 0, decodingCtx);
-084  }
-085
-086  /* common prefixes 
*/
-087  // Having this as static is fine but if 
META is having DBE then we should
-088  // change this.
-089  public static int 
compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {
-090return 
Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,
-091left.getRowLength() - 
rowCommonPrefix, right.getRowArray(), right.getRowOffset()
-092+ rowCommonPrefix, 
right.getRowLength() - rowCommonPrefix);
-093  }
-094
-095  public static int 
compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {
-096return 
Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + 
familyCommonPrefix,
-097left.getFamilyLength() - 
familyCommonPrefix, right.getFamilyArray(),
-098right.getFamilyOffset() + 
familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);
-099  }
-100
-101  public static int 
compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {
-102return 
Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + 
qualCommonPrefix,
-103left.getQualifierLength() - 
qualCommonPrefix, right.getQualifierArray(),
-104right.getQualifierOffset() + 
qualCommonPrefix, right.getQualifierLength()
-105- qualCommonPrefix);
-106  }
-107
-108  pr
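The compareCommon*Prefix helpers in the listing above all follow the same shape:
skip the bytes already known to be equal and compare only the remainder. A
standalone sketch of that idea with plain byte arrays (the method name is
hypothetical):

    // commonPrefix bytes at the start of both arrays are known to be equal.
    static int compareSuffix(byte[] left, byte[] right, int commonPrefix) {
      return Bytes.compareTo(left, commonPrefix, left.length - commonPrefix,
          right, commonPrefix, right.length - commonPrefix);
    }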

[28/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
index fe83140..e4d1825 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static class BufferedDataBlockEncoder.OffheapDecodedCell
+protected static class BufferedDataBlockEncoder.OffheapDecodedCell
 extends ByteBufferCell
 implements ExtendedCell
 
@@ -471,7 +471,7 @@ implements 
 
 FIXED_OVERHEAD
-private static final long FIXED_OVERHEAD
+private static final long FIXED_OVERHEAD
 
 
 
@@ -480,7 +480,7 @@ implements 
 
 keyBuffer
-private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer keyBuffer
+private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer keyBuffer
 
 
 
@@ -489,7 +489,7 @@ implements 
 
 rowLength
-private short rowLength
+private short rowLength
 
 
 
@@ -498,7 +498,7 @@ implements 
 
 familyOffset
-private int familyOffset
+private int familyOffset
 
 
 
@@ -507,7 +507,7 @@ implements 
 
 familyLength
-private byte familyLength
+private byte familyLength
 
 
 
@@ -516,7 +516,7 @@ implements 
 
 qualifierOffset
-private int qualifierOffset
+private int qualifierOffset
 
 
 
@@ -525,7 +525,7 @@ implements 
 
 qualifierLength
-private int qualifierLength
+private int qualifierLength
 
 
 
@@ -534,7 +534,7 @@ implements 
 
 timestamp
-private long timestamp
+private long timestamp
 
 
 
@@ -543,7 +543,7 @@ implements 
 
 typeByte
-private byte typeByte
+private byte typeByte
 
 
 
@@ -552,7 +552,7 @@ implements 
 
 valueBuffer
-private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer valueBuffer
+private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer valueBuffer
 
 
 
@@ -561,7 +561,7 @@ implements 
 
 valueOffset
-private int valueOffset
+private int valueOffset
 
 
 
@@ -570,7 +570,7 @@ implements 
 
 valueLength
-private int valueLength
+private int valueLength
 
 
 
@@ -579,7 +579,7 @@ implements 
 
 tagsBuffer
-private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer tagsBuffer
+private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer tagsBuffer
 
 
 
@@ -588,7 +588,7 @@ implements 
 
 tagsOffset
-private int tagsOffset
+private int tagsOffset
 
 
 
@@ -597,7 +597,7 @@ implements 
 
 tagsLength
-private int tagsLength
+private int tagsLength
 
 
 
@@ -606,7 +606,7 @@ implements 
 
 seqId
-private long seqId
+private long seqId
 
 
 
@@ -623,7 +623,7 @@ implements 
 
 OffheapDecodedCell
-protected OffheapDecodedCell(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer keyBuffer,
+protected OffheapDecodedCell(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer keyBuffer,
  short rowLength,
  int familyOffset,
  byte familyLength,
@@ -654,7 +654,7 @@ implements 
 
 getRowArray
-public byte[] getRowArray()
+public byte[] getRowArray()
 Description copied from 
interface: Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -672,7 +672,7 @@ implements 
 
 getRowOffset
-public int getRowOffset()
+public int getRowOffset()
 
 Specified by:
 getRowOffset in
 interface Cell
@@ -687,7 +687,7 @@ implements 
 
 getRowLength
-public short getRowLength()
+public short getRowLength()
 
 Specified by:
 getRowLength in
 interface Cell
@@ -702,7 +702,7 @@ implements 
 
 getFamilyArray
-public byte[] getFamilyArray()
+public byte[] getFamilyArray()
 Description copied from 
interface: Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -720,7 +720,7 @@ implements 
 
 getFamilyOffset
-public int getFamilyOffset()
+public int getFamilyOffset()
 
 Specified by:
 getFamilyOffse
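The array/offset/length accessors documented above are meant to be used as a
triple; for example, materializing the row bytes of a Cell (CellUtil.cloneRow is
the convenience wrapper for the same copy):

    byte[] row = new byte[cell.getRowLength()];
    System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, row.length);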

[33/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.html
index 5644c17..fe55529 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.html
@@ -673,19 +673,23 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
+static boolean
+isPut(Cell cell) 
+
+
 static byte[]
 makeColumn(byte[] family,
   byte[] qualifier)
 Makes a column in family:qualifier form from separate byte 
arrays.
 
 
 static boolean
 matchingColumn(Cell left,
   byte[] fam,
   byte[] qual) 
 
 static boolean
 matchingColumn(Cell left,
   byte[] fam,
@@ -699,17 +703,17 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
 static boolean
 matchingColumn(Cell left,
   Cell right) 
 
 static boolean
 matchingFamily(Cell left,
   byte[] buf) 
 
 static boolean
 matchingFamily(Cell left,
   byte[] buf,
@@ -720,12 +724,12 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
 static boolean
 matchingFamily(Cell left,
   Cell right) 
 
 static boolean
 matchingQualifier(Cell left,
  byte[] buf)
@@ -733,7 +737,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  byte[] are equal
 
 
 static boolean
 matchingQualifier(Cell left,
  byte[] buf,
@@ -744,12 +748,12 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
 static boolean
 matchingQualifier(Cell left,
  Cell right) 
 
 static boolean
 matchingRow(Cell left,
byte[] buf)
@@ -759,14 +763,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
 static boolean
 matchingRow(Cell left,
byte[] buf,
int offset,
int length) 
 
 static boolean
 matchingRow(Cell left,
Cell right)
@@ -776,36 +780,36 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
 static boolean
 matchingRowColumn(Cell left,
  Cell right)
 Compares the row and column of two keyvalues for 
equality
 
 
 static boolean
 matchingRowColumnBytes(Cell left,
   Cell right) 
 
 static boolean
 matchingRows(Cell left,
 byte[] buf) 
 
 static boolean
 matchingRows(Cell left,
 Cell right)
 Compares the row of two keyvalues for equality
 
 
 static boolean
 matchingTimestamp(Cell a,
  Cell b) 
 
 static boolean
 matchingType(Cell a,
 Cell b)
@@ -814,24 +818,24 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
 static boolean
 matchingValue(Cell left,
  byte[] buf) 
 
-
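A short sketch of the matching* helpers tabulated above (the cell and the
family/qualifier/value byte arrays are hypothetical):

    byte[] fam = Bytes.toBytes("f");
    byte[] qual = Bytes.toBytes("q1");
    if (CellUtil.matchingColumn(cell, fam, qual)
        && CellUtil.matchingValue(cell, expectedValue)) {
      // same family:qualifier and same value
    }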

[31/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/Result.html
index c33c309..7c79d41 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Result.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class Result
+public class Result
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements CellScannable, CellScanner
 Single row result of a Get or Scan query.
@@ -659,7 +659,7 @@ implements 
 
 cells
-private Cell[] cells
+private Cell[] cells
 
 
 
@@ -668,7 +668,7 @@ implements 
 
 exists
-private http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean exists
+private http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean exists
 
 
 
@@ -677,7 +677,7 @@ implements 
 
 stale
-private boolean stale
+private boolean stale
 
 
 
@@ -686,7 +686,7 @@ implements 
 
 mayHaveMoreCellsInRow
-private boolean mayHaveMoreCellsInRow
+private boolean mayHaveMoreCellsInRow
 See mayHaveMoreCellsInRow().
 
 
@@ -696,7 +696,7 @@ implements 
 
 row
-private transient byte[] row
+private transient byte[] row
 
 
 
@@ -705,7 +705,7 @@ implements 
 
 familyMap
-private transient http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapNavigableMapNavigableMapLong,byte[]>>> familyMap
+private transient http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapNavigableMapNavigableMapLong,byte[]>>> familyMap
 
 
 
@@ -714,7 +714,7 @@ implements 
 
 localBuffer
-private static http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadLocal.html?is-external=true";
 title="class or interface in java.lang">ThreadLocal localBuffer
+private static http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadLocal.html?is-external=true";
 title="class or interface in java.lang">ThreadLocal localBuffer
 
 
 
@@ -723,7 +723,7 @@ implements 
 
 PAD_WIDTH
-private static final int PAD_WIDTH
+private static final int PAD_WIDTH
 
 See Also:
 Constant
 Field Values
@@ -736,7 +736,7 @@ implements 
 
 EMPTY_RESULT
-public static final Result EMPTY_RESULT
+public static final Result EMPTY_RESULT
 
 
 
@@ -745,7 +745,7 @@ implements 
 
 INITIAL_CELLSCANNER_INDEX
-private static final int INITIAL_CELLSCANNER_INDEX
+private static final int INITIAL_CELLSCANNER_INDEX
 
 See Also:
 Constant
 Field Values
@@ -758,7 +758,7 @@ implements 
 
 cellScannerIndex
-private int cellScannerIndex
+private int cellScannerIndex
 Index for where we are when Result is acting as a CellScanner.
 
 
@@ -768,7 +768,7 @@ implements 
 
 stats
-private RegionLoadStats stats
+private RegionLoadStats stats
 
 
 
@@ -777,7 +777,7 @@ implements 
 
 readonly
-private final boolean readonly
+private final boolean readonly
 
 
 
@@ -786,7 +786,7 @@ implements 
 
 cursor
-private Cursor cursor
+private Cursor cursor
 
 
 
@@ -803,7 +803,7 @@ implements 
 
 Result
-public Result()
+public Result()
 Creates an empty Result w/ no KeyValue payload; returns 
null if you call rawCells().
  Use this to represent no results if null won't do or in old 
'mapred' as opposed
  to 'mapreduce' package MapReduce where you need to overwrite a Result 
instance with a
@@ -816,7 +816,7 @@ implements 
 
 Result
-private Result(boolean readonly)
+private Result(boolean readonly)
 Allows to construct special purpose immutable Result 
objects,
  such as EMPTY_RESULT.
 
@@ -831,7 +831,7 @@ implements 
 
 Result
-private Result(Cursor cursor)
+private Result(Cursor cursor)
 
 
 
@@ -840,7 +840,7 @@ implements 
 
 Result
-private Result(Cell[] cells,
+private 
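As the class comment notes, a fresh Result is itself a primed CellScanner; a
minimal sketch of both access styles (the Table and row key are assumed):

    Result result = table.get(new Get(Bytes.toBytes("row1")));

    // Direct lookup of the latest value:
    byte[] value = result.getValue(Bytes.toBytes("f"), Bytes.toBytes("q1"));

    // CellScanner-style iteration (one-shot; call cellScanner() to reset):
    while (result.advance()) {
      Cell cell = result.current();
      // ...
    }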

[39/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
index ced79d7..71cf628 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
@@ -30,7 +30,7 @@
 022import java.util.ArrayList;
 023
 024import org.apache.hadoop.hbase.Cell;
-025import 
org.apache.hadoop.hbase.CellComparatorImpl;
+025import 
org.apache.hadoop.hbase.CellComparator;
 026import 
org.apache.yetus.audience.InterfaceAudience;
 027import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
 028import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -74,7 +74,7 @@
 066  public boolean filterRowKey(Cell 
firstRowCell) {
 067// if stopRowKey is <= buffer, 
then true, filter row.
 068if (filterAllRemaining()) return 
true;
-069int cmp = 
CellComparatorImpl.COMPARATOR.compareRows(firstRowCell, stopRowKey, 0, 
stopRowKey.length);
+069int cmp = 
CellComparator.getInstance().compareRows(firstRowCell, stopRowKey, 0, 
stopRowKey.length);
 070done = reversed ? cmp < 0 : cmp 
> 0;
 071return done;
 072  }

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellSortReducer.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellSortReducer.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellSortReducer.html
index 98374fa..2c4df69 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellSortReducer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellSortReducer.html
@@ -30,7 +30,7 @@
 022import java.util.TreeSet;
 023
 024import org.apache.hadoop.hbase.Cell;
-025import 
org.apache.hadoop.hbase.CellComparatorImpl;
+025import 
org.apache.hadoop.hbase.CellComparator;
 026import 
org.apache.hadoop.hbase.PrivateCellUtil;
 027import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 028import 
org.apache.hadoop.hbase.util.MapReduceCell;
@@ -50,7 +50,7 @@
 042  protected void 
reduce(ImmutableBytesWritable row, Iterable kvs,
 043  Reducer.Context context)
 044  throws java.io.IOException, 
InterruptedException {
-045TreeSet map = new 
TreeSet<>(CellComparatorImpl.COMPARATOR);
+045TreeSet map = new 
TreeSet<>(CellComparator.getInstance());
 046for (Cell kv : kvs) {
 047  try {
 048
map.add(PrivateCellUtil.deepClone(kv));

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index bbadffb..62bc799 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -54,7 +54,7 @@
 046import org.apache.hadoop.fs.FileSystem;
 047import org.apache.hadoop.fs.Path;
 048import org.apache.hadoop.hbase.Cell;
-049import 
org.apache.hadoop.hbase.CellComparatorImpl;
+049import 
org.apache.hadoop.hbase.CellComparator;
 050import 
org.apache.hadoop.hbase.CellUtil;
 051import 
org.apache.hadoop.hbase.HConstants;
 052import 
org.apache.hadoop.hbase.HRegionLocation;
@@ -410,12 +410,12 @@
 402  wl.writer =
 403  new 
StoreFileWriter.Builder(conf, new CacheConfig(tempConf), fs)
 404  
.withOutputDir(familydir).withBloomType(bloomType)
-405  
.withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext).build();
+405  
.withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();
 406} else {
 407  wl.writer =
 408  new 
StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))
 409  
.withOutputDir(familydir).withBloomType(bloomType)
-410  
.withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext)
+410  
.withComparator(CellComparator.getInstance()).withFileContext(hFileContext)
 411  
.withFavoredNodes(favoredNodes).build();
 412}
 413
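The hunks above all make the same substitution, so for user code the migration
from the internal comparator constant to the public factory is mechanical, e.g.:

    // Before (internal API):
    //   TreeSet<Cell> set = new TreeSet<>(CellComparatorImpl.COMPARATOR);
    // After (public API):
    TreeSet<Cell> set = new TreeSet<>(CellComparator.getInstance());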



[37/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
index d7088f8..a314d9d 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
@@ -35,7 +35,7 @@
 027import 
org.apache.hadoop.conf.Configuration;
 028import 
org.apache.hadoop.hbase.ArrayBackedTag;
 029import org.apache.hadoop.hbase.Cell;
-030import 
org.apache.hadoop.hbase.CellComparatorImpl;
+030import 
org.apache.hadoop.hbase.CellComparator;
 031import 
org.apache.hadoop.hbase.KeyValue;
 032import 
org.apache.hadoop.hbase.KeyValueUtil;
 033import org.apache.hadoop.hbase.Tag;
@@ -85,7 +85,7 @@
 077"putsortreducer.row.threshold", 
1L * (1<<30));
 078Iterator iter = 
puts.iterator();
 079while (iter.hasNext()) {
-080  TreeSet map = new 
TreeSet<>(CellComparatorImpl.COMPARATOR);
+080  TreeSet map = new 
TreeSet<>(CellComparator.getInstance());
 081  long curSize = 0;
 082  // stop at the end or the RAM 
threshold
 083  List tags = new 
ArrayList<>();

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TextSortReducer.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TextSortReducer.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TextSortReducer.html
index 237cdd2..44251b2 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TextSortReducer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TextSortReducer.html
@@ -35,7 +35,7 @@
 027import 
org.apache.hadoop.conf.Configuration;
 028import 
org.apache.hadoop.hbase.ArrayBackedTag;
 029import org.apache.hadoop.hbase.Cell;
-030import 
org.apache.hadoop.hbase.CellComparatorImpl;
+030import 
org.apache.hadoop.hbase.CellComparator;
 031import 
org.apache.hadoop.hbase.KeyValue;
 032import 
org.apache.hadoop.hbase.KeyValueUtil;
 033import org.apache.hadoop.hbase.Tag;
@@ -152,7 +152,7 @@
 144"reducer.row.threshold", 1L * 
(1<<30));
 145Iterator iter = 
lines.iterator();
 146while (iter.hasNext()) {
-147  Set kvs = new 
TreeSet<>(CellComparatorImpl.COMPARATOR);
+147  Set kvs = new 
TreeSet<>(CellComparator.getInstance());
 148  long curSize = 0;
 149  // stop at the end or the RAM 
threshold
 150  while (iter.hasNext() && 
curSize < threshold) {

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/book.html
--
diff --git a/book.html b/book.html
index 5bc12e9..ff26207 100644
--- a/book.html
+++ b/book.html
@@ -35484,7 +35484,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2017-11-06 14:30:44 UTC
+Last updated 2017-11-07 14:29:34 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index 9a0a7b5..6e0a855 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-11-06
+  Last Published: 
2017-11-07
 
 
 



[48/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 5c389ff..c10ded7 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -289,8 +289,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 CellUtil.cloneValue(Cell cell) 
 
 
+int
+CellComparator.compare(Cell leftCell,
+   Cell rightCell)
+Lexicographically compares two cells.
+
+
+
 static int
-CellUtil.compare(org.apache.hadoop.hbase.CellComparator comparator,
+CellUtil.compare(CellComparator comparator,
Cell left,
byte[] key,
int offset,
@@ -300,7 +307,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
 static int
 CellUtil.compareColumns(Cell left,
   byte[] right,
@@ -311,7 +318,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Compares the cell's column (family and qualifier) with the 
given byte[]
 
 
 static int
 CellUtil.compareFamilies(Cell left,
byte[] right,
@@ -320,6 +327,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Compares the cell's family with the given byte[]
 
 
+
+int
+CellComparator.compareFamilies(Cell leftCell,
+   Cell rightCell)
+Lexicographically compares the families of the two cells
+
+
 
 static int
 CellUtil.compareQualifiers(Cell left,
@@ -330,6 +344,44 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+int
+CellComparator.compareQualifiers(Cell leftCell,
+ Cell rightCell)
+Lexicographically compares the qualifiers of the two cells
+
+
+
+int
+CellComparator.compareRows(Cell cell,
+   byte[] bytes,
+   int offset,
+   int length)
+Compares the row part of the cell with a simple plain 
byte[] like the
+ stopRow in Scan.
+
+
+
+int
+CellComparator.compareRows(Cell leftCell,
+   Cell rightCell)
+Lexicographically compares the rows of two cells.
+
+
+
+int
+CellComparator.compareTimestamps(Cell leftCell,
+ Cell rightCell)
+Compares cells' timestamps in DESCENDING order.
+
+
+
+int
+CellComparator.compareWithoutRow(Cell leftCell,
+ Cell rightCell)
+Lexicographically compares the two cells excluding the row part.
+
+
+
 static int
 CellUtil.copyFamilyTo(Cell cell,
 byte[] destination,
@@ -337,7 +389,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Copies the family to the given byte[]
 
 
 static int
 CellUtil.copyFamilyTo(Cell cell,
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer destination,
@@ -345,7 +397,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Copies the family to the given bytebuffer
 
 
 static int
 CellUtil.copyQualifierTo(Cell cell,
byte[] destination,
@@ -353,7 +405,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Copies the qualifier to the given byte[]
 
 
 static int
 CellUtil.copyQualifierTo(Cell cell,
http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer destination,
@@ -361,13 +413,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Copies the qualifier to the given bytebuffer
 
 
 static byte[]
 CellUtil.copyRow(Cell cell)
 Copies the row to a new byte[]
 
 
 static int
 CellUtil.copyRowTo(Cell cell,
  byte[] destination,
@@ -375,7 +427,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Copies the row to the given byte[]
 
 
 static int
 CellUtil.copyRowTo(Cell cell,
  http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer destination,
@@ -383,7 +435,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Copies the row to the given bytebuffer
 
 
 static int
 CellUtil.copyTagTo(Cell cell,
  byte[] destination,
@@ -393,7 +445,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
 static int
 CellUtil.copyTagTo(Cell cell,
  http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer destination,
@@ -403,7 +455,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
 static int
 CellUtil.copyValueTo(Cell cell,
byte[] destination,
@@ -411,7 +463,7 @@ Input/OutputFormats, a table index
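The copy*To helpers listed above each take a destination buffer plus offset and
return the offset after the copied bytes, so copies can be chained; a sketch
(buffer sizing is the caller's responsibility):

    byte[] buf = new byte[cell.getRowLength() + cell.getFamilyLength()];
    int off = CellUtil.copyRowTo(cell, buf, 0);
    CellUtil.copyFamilyTo(cell, buf, off);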

[51/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/2cef721c
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/2cef721c
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/2cef721c

Branch: refs/heads/asf-site
Commit: 2cef721c49784bb3f1a6068031fc43d03d45fa4e
Parents: 2ef9b5f
Author: jenkins 
Authored: Tue Nov 7 15:15:31 2017 +
Committer: jenkins 
Committed: Tue Nov 7 15:15:31 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apidocs/allclasses-frame.html   | 2 +
 apidocs/allclasses-noframe.html | 2 +
 apidocs/deprecated-list.html|   226 +-
 apidocs/index-all.html  |69 +-
 apidocs/org/apache/hadoop/hbase/Cell.html   |32 +-
 .../apache/hadoop/hbase/CellBuilderType.html| 4 +-
 .../org/apache/hadoop/hbase/CellComparator.html |   471 +
 apidocs/org/apache/hadoop/hbase/CellUtil.html   |   171 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   146 +-
 .../hadoop/hbase/class-use/CellComparator.html  |   214 +
 .../org/apache/hadoop/hbase/client/Result.html  |   110 +-
 .../hbase/filter/BigDecimalComparator.html  |   420 +
 .../hadoop/hbase/filter/BinaryComparator.html   | 4 +-
 .../hbase/filter/ByteArrayComparable.html   | 2 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.html |18 +-
 .../filter/class-use/BigDecimalComparator.html  |   168 +
 .../filter/class-use/ByteArrayComparable.html   |18 +-
 .../hadoop/hbase/filter/package-frame.html  | 1 +
 .../hadoop/hbase/filter/package-summary.html|80 +-
 .../hadoop/hbase/filter/package-tree.html   | 1 +
 .../apache/hadoop/hbase/filter/package-use.html |85 +-
 .../apache/hadoop/hbase/mapreduce/Import.html   |16 +-
 .../org/apache/hadoop/hbase/package-frame.html  | 1 +
 .../apache/hadoop/hbase/package-summary.html| 7 +
 .../org/apache/hadoop/hbase/package-tree.html   | 5 +
 .../org/apache/hadoop/hbase/package-use.html|48 +-
 apidocs/org/apache/hadoop/hbase/util/Bytes.html | 4 +-
 apidocs/overview-tree.html  | 6 +
 .../src-html/org/apache/hadoop/hbase/Cell.html  |   113 +-
 .../org/apache/hadoop/hbase/CellComparator.html |   197 +
 .../org/apache/hadoop/hbase/CellUtil.html   |  1181 +-
 .../org/apache/hadoop/hbase/ClusterStatus.html  | 6 +-
 .../org/apache/hadoop/hbase/client/Result.html  |  1943 +-
 .../hbase/filter/BigDecimalComparator.html  |   188 +
 .../hadoop/hbase/filter/FuzzyRowFilter.html |  1212 +-
 .../hbase/filter/InclusiveStopFilter.html   | 4 +-
 .../hadoop/hbase/mapreduce/CellSortReducer.html | 4 +-
 .../hbase/mapreduce/HFileOutputFormat2.html | 6 +-
 .../apache/hadoop/hbase/mapreduce/Import.html   |  1264 +-
 .../hadoop/hbase/mapreduce/PutSortReducer.html  | 4 +-
 .../hadoop/hbase/mapreduce/TextSortReducer.html | 4 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 29352 -
 checkstyle.rss  |48 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 1 +
 devapidocs/allclasses-noframe.html  | 1 +
 devapidocs/constant-values.html | 6 +-
 devapidocs/deprecated-list.html |   316 +-
 devapidocs/index-all.html   |37 +-
 devapidocs/org/apache/hadoop/hbase/Cell.html|32 +-
 .../org/apache/hadoop/hbase/CellComparator.html |44 +-
 .../CellComparatorImpl.MetaCellComparator.html  | 7 +
 .../apache/hadoop/hbase/CellComparatorImpl.html | 7 +
 .../org/apache/hadoop/hbase/CellUtil.html   |   165 +-
 .../org/apache/hadoop/hbase/KeyValue.html   | 4 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   130 +-
 .../hadoop/hbase/class-use/CellComparator.html  |15 +
 .../hbase/class-use/KeyValue.KVComparator.html  | 2 +-
 .../org/apache/hadoop/hbase/client/Result.html  |   142 +-
 .../hadoop/hbase/client/package-tree.html   |26 +-
 .../class-use/DeserializationException.html |58 +-
 .../hbase/filter/BigDecimalComparator.html  |   483 +
 .../hadoop/hbase/filter/BinaryComparator.html   | 4 +-
 .../hbase/filter/ByteArrayComparable.h

[10/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html
index aa5ad0d..30d80c0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html
@@ -32,619 +32,617 @@
 024import java.util.PriorityQueue;
 025
 026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.CellComparatorImpl;
-028import 
org.apache.hadoop.hbase.CellUtil;
-029import 
org.apache.hadoop.hbase.PrivateCellUtil;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
-033import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair;
-036import 
org.apache.hadoop.hbase.util.Bytes;
-037import 
org.apache.hadoop.hbase.util.Pair;
-038import 
org.apache.hadoop.hbase.util.UnsafeAccess;
-039import 
org.apache.hadoop.hbase.util.UnsafeAvailChecker;
-040
-041import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-042
-043/**
-044 * This is an optimized version of a standard FuzzyRowFilter. Filters data based on fuzzy row key.
-045 * Performs fast-forwards during scanning. It takes pairs (row key, fuzzy info) to match row keys.
-046 * Where fuzzy info is a byte array with 0 or 1 as its values:
-047 * <ul>
-048 * <li>0 - means that this byte in provided row key is fixed, i.e. row key's byte at same position
-049 * must match</li>
-050 * <li>1 - means that this byte in provided row key is NOT fixed, i.e. row key's byte at this
-051 * position can be different from the one in provided row key</li>
-052 * </ul>
-053 * Example: Let's assume row key format is userId_actionId_year_month. Length of userId is fixed and
-054 * is 4, length of actionId is 2 and year and month are 4 and 2 bytes long respectively. Let's
-055 * assume that we need to fetch all users that performed certain action (encoded as "99") in Jan of
-056 * any year. Then the pair (row key, fuzzy info) would be the following: row key = "????_99_????_01"
-057 * (one can use any value instead of "?") fuzzy info =
-058 * "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00" I.e. fuzzy info tells the matching
-059 * mask is "????_99_????_01", where at ? can be any value.
-060 */
-061@InterfaceAudience.Public
-062public class FuzzyRowFilter extends FilterBase {
-063  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-064  private List<Pair<byte[], byte[]>> fuzzyKeysData;
-065  private boolean done = false;
-066
-067  /**
-068   * The index of a last successfully found matching fuzzy string (in fuzzyKeysData). We will start
-069   * matching next KV with this one. If they do not match then we will return back to the one-by-one
-070   * iteration over fuzzyKeysData.
-071   */
-072  private int lastFoundIndex = -1;
-073
-074  /**
-075   * Row tracker (keeps all next rows after SEEK_NEXT_USING_HINT was returned)
-076   */
-077  private RowTracker tracker;
-078
-079  public FuzzyRowFilter(List<Pair<byte[], byte[]>> fuzzyKeysData) {
-080    List<Pair<byte[], byte[]>> fuzzyKeyDataCopy = new ArrayList<>(fuzzyKeysData.size());
-081
-082    for (Pair<byte[], byte[]> aFuzzyKeysData : fuzzyKeysData) {
-083      if (aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) {
-084        Pair<String, String> readable =
-085          new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), Bytes.toStringBinary(aFuzzyKeysData.getSecond()));
-086        throw new IllegalArgumentException("Fuzzy pair lengths do not match: " + readable);
-087      }
-088
-089      Pair<byte[], byte[]> p = new Pair<>();
-090      // create a copy of pair bytes so that they are not modified by the filter.
-091      p.setFirst(Arrays.copyOf(aFuzzyKeysData.getFirst(), aFuzzyKeysData.getFirst().length));
-092      p.setSecond(Arrays.copyOf(aFuzzyKeysData.getSecond(), aFuzzyKeysData.getSecond().length));
-093
-094      // update mask ( 0 -> -1 (0xff), 1 -> 2)
-095      p.setSecond(preprocessMask(p.getSecond()));
-096      preprocessSearchKey(p);
-097
-098      fuzzyKeyDataCopy.add(p);
-099    }
-100    this.fuzzyKeysData = fuzzyKeyDataCopy;
-101    this.tracker = new RowTracker();
-102  }
+027import org.apache.hadoop.hbase.CellComparator;
+028import org.apache.hadoop.hbase.Private
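Putting the javadoc example above into code, the "action 99 in January of any
year" scan would look roughly like this (the Scan setup itself is assumed):

    byte[] rowKey = Bytes.toBytesBinary("????_99_????_01");
    byte[] fuzzyInfo = Bytes.toBytesBinary(
        "\\x01\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x01\\x01\\x01\\x01\\x00\\x00\\x00");
    Scan scan = new Scan();
    scan.setFilter(new FuzzyRowFilter(Arrays.asList(new Pair<>(rowKey, fuzzyInfo))));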

hbase-site git commit: INFRA-10751 Empty commit

2017-11-07 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 2cef721c4 -> 167adb09f


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/167adb09
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/167adb09
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/167adb09

Branch: refs/heads/asf-site
Commit: 167adb09ff39d6e00a39f742d9f26cd1c90116c2
Parents: 2cef721
Author: jenkins 
Authored: Tue Nov 7 15:15:55 2017 +
Committer: jenkins 
Committed: Tue Nov 7 15:15:55 2017 +

--

--




[29/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
index 0a495cf..b8ce178 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/ByteArrayComparable.html
@@ -263,39 +263,45 @@
 
 
 class 
+BigDecimalComparator
+A BigDecimal comparator which numerically compares against the specified byte array
+
+
+
+class 
 BinaryComparator
 A binary comparator which lexicographically compares 
against the specified
  byte array using Bytes.compareTo(byte[],
 byte[]).
 
 
 class 
 BinaryPrefixComparator
 A comparator which compares against a specified byte array, 
but only compares
  up to the length of this byte array.
 
 
 class 
 BitComparator
 A bit comparator which performs the specified bitwise 
operation on each of the bytes
  with the specified byte array.
 
 
 class 
 LongComparator
 A long comparator which numerically compares against the specified byte array
 
 
 class 
 NullComparator
 A binary comparator which lexicographically compares 
against the specified
  byte array using Bytes.compareTo(byte[],
 byte[]).
 
 
 class 
 RegexStringComparator
 This comparator is for use with CompareFilter 
implementations, such
@@ -303,7 +309,7 @@
  filtering based on the value of a given column.
 
 
 class 
 SubstringComparator
 This comparator is for use with SingleColumnValueFilter, 
for filtering based on
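A sketch of the new BigDecimalComparator in a value filter (the column,
threshold, and scan are hypothetical; values are assumed to be stored as
serialized BigDecimals):

    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("f"), Bytes.toBytes("amount"),
        CompareOperator.GREATER,
        new BigDecimalComparator(new BigDecimal("10.50")));
    scan.setFilter(filter);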

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/filter/package-frame.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-frame.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-frame.html
index 406de8b..29fce09 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-frame.html
@@ -17,6 +17,7 @@
 
 Classes
 
+BigDecimalComparator
 BinaryComparator
 BinaryPrefixComparator
 BitComparator

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/filter/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-summary.html
index d4608f5..1f0cacd 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-summary.html
@@ -106,65 +106,71 @@
 
 
 
+BigDecimalComparator
+
+A BigDecimal comparator which numerically compares against the specified byte array
+
+
+
 BinaryComparator
 
 A binary comparator which lexicographically compares 
against the specified
  byte array using Bytes.compareTo(byte[],
 byte[]).
 
 
 BinaryPrefixComparator
 
 A comparator which compares against a specified byte array, 
but only compares
  up to the length of this byte array.
 
 
 BitComparator
 
 A bit comparator which performs the specified bitwise 
operation on each of the bytes
  with the specified byte array.
 
 
 ByteArrayComparable
 
 Base class for byte array comparators
 
 
 ColumnCountGetFilter
 
 Simple filter that returns first N columns on row 
only.
 
 
 ColumnPaginationFilter
 
 A filter, based on the ColumnCountGetFilter, that takes two arguments: limit and offset.
 
 
 ColumnPrefixFilter
 
 This filter is used for selecting only those keys with columns that match
  a particular prefix.
 
 
 ColumnRangeFilter
 
 This filter is used for selecting only those keys with columns that are
  between minColumn and maxColumn.
 
 
-
+
 CompareFilter
 
 This is a generic filter to be used to filter by 
comparison.
 
 
-
+
 DependentColumnFilter
 
 A filter for adding inter-column timestamp matching
@@ -174,26 +180,26 @@
  full rows for correct filtering
 
 
-
+
 FamilyFilter
 
 
  This filter is used to filter based on the column family.
 
 
-
+
 Filter
 
 Interface for row and column filters directly applied 
within the regionserver.
 
 
-
+
 FilterBase
 
 Abstract base class to help you implement new Filters.
 
 
-
+
 FilterList
 
 Implementation of Filter that represents an 
ordered List of Filters which will be
@@ -201,116 +207,116 @@
  FilterList.Operator.MUST_PASS_ONE
 (OR).
 
 
-
+
 FilterListBase
 
 Base class for FilterList.
 
 
-
+
 FilterListWithAND
 
 FilterListWithAND represents an ordered list of filters 
which will be evaluated with an AND
  operator.
 
 
-
+
 FilterListWithOR
 
 FilterListWithOR represents an ordered list of filters 
which will be evaluated with an OR
  ope
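
The FilterList entries above combine other filters with MUST_PASS_ALL (AND) or
MUST_PASS_ONE (OR). A hedged sketch, with illustrative member filters:

import java.util.Arrays;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;

class FilterListSketch {
  // AND two filters: only "addr_"-prefixed columns, and strip values (keys only).
  static FilterList prefixedKeysOnly() {
    return new FilterList(FilterList.Operator.MUST_PASS_ALL,
        Arrays.<Filter>asList(
            new ColumnPrefixFilter(Bytes.toBytes("addr_")),
            new KeyOnlyFilter()));
  }
}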

[14/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Result.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Result.html
index 396e574..c28f3c1 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Result.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Result.html
@@ -43,978 +43,977 @@
 035
 036import org.apache.hadoop.hbase.Cell;
 037import 
org.apache.hadoop.hbase.CellComparator;
-038import 
org.apache.hadoop.hbase.CellComparatorImpl;
-039import 
org.apache.hadoop.hbase.CellScannable;
-040import 
org.apache.hadoop.hbase.CellScanner;
-041import 
org.apache.hadoop.hbase.CellUtil;
-042import 
org.apache.hadoop.hbase.HConstants;
-043import 
org.apache.hadoop.hbase.PrivateCellUtil;
-044import 
org.apache.hadoop.hbase.KeyValue;
-045import 
org.apache.hadoop.hbase.KeyValueUtil;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047import 
org.apache.hadoop.hbase.util.Bytes;
-048
-049/**
-050 * Single row result of a {@link Get} or {@link Scan} query.
-051 *
-052 * This class is NOT THREAD SAFE.
-053 *
-054 * Convenience methods are available that return various {@link Map}
-055 * structures and values directly.
-056 *
-057 * To get a complete mapping of all cells in the Result, which can include
-058 * multiple families and multiple versions, use {@link #getMap()}.
-059 *
-060 * To get a mapping of each family to its columns (qualifiers and values),
-061 * including only the latest version of each, use {@link #getNoVersionMap()}.
-062 *
-063 * To get a mapping of qualifiers to latest values for an individual family use
-064 * {@link #getFamilyMap(byte[])}.
-065 *
-066 * To get the latest value for a specific family and qualifier use
-067 * {@link #getValue(byte[], byte[])}.
-068 *
-069 * A Result is backed by an array of {@link Cell} objects, each representing
-070 * an HBase cell defined by the row, family, qualifier, timestamp, and value.
-071 *
-072 * The underlying {@link Cell} objects can be accessed through the method {@link #listCells()}.
-073 * This will create a List from the internal Cell []. Better is to exploit the fact that
-074 * a new Result instance is a primed {@link CellScanner}; just call {@link #advance()} and
-075 * {@link #current()} to iterate over Cells as you would any {@link CellScanner}.
-076 * Call {@link #cellScanner()} to reset should you need to iterate the same Result over again
-077 * ({@link CellScanner}s are one-shot).
-078 *
-079 * If you need to overwrite a Result with another Result instance -- as in the old 'mapred'
-080 * RecordReader next invocations -- then create an empty Result with the null constructor and
-081 * in then use {@link #copyFrom(Result)}
-082 */
-083@InterfaceAudience.Public
-084public class Result implements CellScannable, CellScanner {
-085  private Cell[] cells;
-086  private Boolean exists; // if the query was just to check existence.
-087  private boolean stale = false;
-088
-089  /**
-090   * See {@link #mayHaveMoreCellsInRow()}.
-091   */
-092  private boolean mayHaveMoreCellsInRow = false;
-093  // We're not using java serialization.  Transient here is just a marker to say
-094  // that this is where we cache row if we're ever asked for it.
-095  private transient byte [] row = null;
-096  // Ditto for familyMap.  It can be composed on fly from passed in kvs.
-097  private transient NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>
-098      familyMap = null;
-099
-100  private static ThreadLocal<byte[]> localBuffer = new ThreadLocal<>();
-101  private static final int PAD_WIDTH = 128;
-102  public static final Result EMPTY_RESULT = new Result(true);
-103
-104  private final static int INITIAL_CELLSCANNER_INDEX = -1;
-105
-106  /**
-107   * Index for where we are when Result is acting as a {@link CellScanner}.
-108   */
-109  private int cellScannerIndex = INITIAL_CELLSCANNER_INDEX;
-110  private RegionLoadStats stats;
-111
-112  private final boolean readonly;
-113
-114  private Cursor cursor = null;
-115
-116  /**
-117   * Creates an empty Result w/ no KeyValue payload; returns null if you call {@link #rawCells()}.
-118   * Use this to represent no results if {@code null} won't do or in old 'mapred' as opposed
-119   * to 'mapreduce' package MapReduce where you need to overwrite a Result instance with a
-120   * {@link #copyFrom(Result)} call.
-121   */
-122  public Result() {
-123    this(false);
-124  }
-125
-126  /**
-127   * Allows to construct special purpose immutable Result objects,
-128   * such as EMPTY_RESULT.
-129   * @param readonly whether this Result instance is readonly
-130   */
-131  private Result(boolean readonly) {
-132    this.readonly = readonly;
-133  }
-134
-135  /**
-136   * Instantiate a
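
A sketch of the CellScanner pattern the javadoc above recommends; the printing
is illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

class ResultIterationSketch {
  // A fresh Result is a primed, one-shot CellScanner: advance(), then current().
  static void dump(Result result) throws IOException {
    while (result.advance()) {
      Cell cell = result.current();
      System.out.println(Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
          + Bytes.toString(CellUtil.cloneValue(cell)));
    }
  }
}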


[36/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 93555a3..5a8bc64 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -286,10 +286,10 @@
  Warnings
  Errors
 
-3425
+3427
 0
 0
-21610
+21597
 
 Files
 
@@ -2427,7 +2427,7 @@
 org/apache/hadoop/hbase/client/TestFromClientSide.java
 0
 0
-177
+166
 
 org/apache/hadoop/hbase/client/TestFromClientSide3.java
 0
@@ -3757,7 +3757,7 @@
 org/apache/hadoop/hbase/filter/TestFilter.java
 0
 0
-351
+349
 
 org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
 0
@@ -6992,7 +6992,7 @@
 org/apache/hadoop/hbase/mob/MobUtils.java
 0
 0
-16
+15
 
 org/apache/hadoop/hbase/mob/TestCachedMobFile.java
 0
@@ -7022,7 +7022,7 @@
 org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
 0
 0
-10
+11
 
 org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
 0
@@ -8587,7 +8587,7 @@
 org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
 0
 0
-15
+14
 
 org/apache/hadoop/hbase/regionserver/StoreFlushContext.java
 0
@@ -8692,7 +8692,7 @@
 org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
 0
 0
-16
+17
 
 org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
 0
@@ -9172,7 +9172,7 @@
 org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
 0
 0
-14
+13
 
 org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
 0
@@ -12047,7 +12047,7 @@
 org/apache/hadoop/hbase/util/CompressionTest.java
 0
 0
-4
+5
 
 org/apache/hadoop/hbase/util/ConcatenatedLists.java
 0
@@ -13293,7 +13293,7 @@
 http://checkstyle.sourceforge.net/config_imports.html#UnusedImports";>UnusedImports
 
 processJavadoc: "true"
-266
+263
  Error
 
 indentation
@@ -13311,12 +13311,12 @@
 http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation";>JavadocTagContinuationIndentation
 
 offset: "2"
-842
+860
  Error
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription";>NonEmptyAtclauseDescription
-4415
+4386
  Error
 
 misc
@@ -13334,7 +13334,7 @@
 
 max: "100"
 ignorePattern: "^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated"
-1948
+1949
  Error
 
 
@@ -13870,13 +13870,13 @@
 sizes
 LineLength
 Line is longer than 100 characters (found 116).
-184
+187
 
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 109).
-186
+189
 
 org/apache/hadoop/hbase/CellBuilderFactory.java
 
@@ -14272,193 +14272,193 @@
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-968
+977
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-995
+1004
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1008
+1017
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1009
+1018
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1010
+1019
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1082
+1091
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1083
+1092
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1094
+1103
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1095
+1104
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1106
+1115
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1120
+1129
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1121
+1130
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1134
+1143
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1160
+1169
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1174
+1183
 
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 107).
-1181
+1190
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1190
+1199
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1200
+1209
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1221
+1230
 
  Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-1223
+1232
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1233
+12

[41/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
new file mode 100644
index 000..21f4ee7
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
@@ -0,0 +1,188 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/*
+002 *
+003 * Licensed to the Apache Software 
Foundation (ASF) under one
+004 * or more contributor license 
agreements.  See the NOTICE file
+005 * distributed with this work for 
additional information
+006 * regarding copyright ownership.  The 
ASF licenses this file
+007 * to you under the Apache License, 
Version 2.0 (the
+008 * "License"); you may not use this file 
except in compliance
+009 * with the License.  You may obtain a 
copy of the License at
+010 *
+011 * 
http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or 
agreed to in writing, software
+014 * distributed under the License is 
distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+016 * See the License for the specific 
language governing permissions and
+017 * limitations under the License.
+018 */
+019
+020package org.apache.hadoop.hbase.filter;
+021
+022import java.math.BigDecimal;
+023import java.nio.ByteBuffer;
+024import java.util.Objects;
+025
+026import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+027import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+028import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+029import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
+030import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+031import 
org.apache.hadoop.hbase.util.Bytes;
+032
+033import 
org.apache.yetus.audience.InterfaceAudience;
+034
+035/**
+036 * A BigDecimal comparator which 
numerical compares against the specified byte array
+037 */
+038@InterfaceAudience.Public
+039public class BigDecimalComparator extends 
ByteArrayComparable {
+040  private BigDecimal bigDecimal;
+041
+042  public BigDecimalComparator(BigDecimal 
value) {
+043super(Bytes.toBytes(value));
+044this.bigDecimal = value;
+045  }
+046
+047  @Override
+048  public boolean equals(Object obj) {
+049if (obj == null || !(obj instanceof 
BigDecimalComparator)) {
+050  return false;
+051}
+052if (this == obj) {
+053  return true;
+054}
+055BigDecimalComparator bdc = 
(BigDecimalComparator) obj;
+056return 
this.bigDecimal.equals(bdc.bigDecimal);
+057  }
+058
+059  @Override
+060  public int hashCode() {
+061return 
Objects.hash(this.bigDecimal);
+062  }
+063
+064  @Override
+065  public int compareTo(byte[] value, int 
offset, int length) {
+066BigDecimal that = 
Bytes.toBigDecimal(value, offset, length);
+067return 
this.bigDecimal.compareTo(that);
+068  }
+069
+070  @Override
+071  public int compareTo(ByteBuffer value, 
int offset, int length) {
+072BigDecimal that = 
ByteBufferUtils.toBigDecimal(value, offset, length);
+073return 
this.bigDecimal.compareTo(that);
+074  }
+075
+076  /**
+077   * @return The comparator serialized 
using pb
+078   */
+079  @Override
+080  public byte[] toByteArray() {
+081
ComparatorProtos.BigDecimalComparator.Builder builder =
+082
ComparatorProtos.BigDecimalComparator.newBuilder();
+083
builder.setComparable(ProtobufUtil.toByteArrayComparable(this.value));
+084return 
builder.build().toByteArray();
+085  }
+086
+087  /**
+088   * @param pbBytes A pb serialized 
{@link BigDecimalComparator} instance
+089   * @return An instance of {@link 
BigDecimalComparator} made from bytes
+090   * @throws DeserializationException A 
deserialization exception
+091   * @see #toByteArray
+092   */
+093  public static BigDecimalComparator 
parseFrom(final byte[] pbBytes)
+094  throws DeserializationException {
+095ComparatorProtos.BigDecimalComparator 
proto;
+096try {
+097  proto = 
ComparatorProtos.BigDecimalComparator.parseFrom(pbBytes);
+098} catch 
(InvalidProtocolBufferException e) {
+099  throw new 
DeserializationException(e);
+100}
+101return new 
BigDecimalComparator(Bytes.toBigDecimal(proto.getComparable().getValue()
+102.toByteArray()));
+103  }
+104
+105  /**
+106   * @param other the other comparator
+107   * @return true if and only if the 
fields of the comparator that are serialized are equal to the
+108   * corresponding fields in 
other. Used for testing.
+109   */
+110  boolean 
areSerializedFieldsEqual(BigDecimalComparator other) {
+111if (other == this) {
+112  return true;
+113}
+114return 
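
A minimal usage sketch for the new comparator, assuming the stored values were
written with Bytes.toBytes(BigDecimal); the "cf"/"amount" names are illustrative:

import java.math.BigDecimal;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BigDecimalComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

class BigDecimalScanSketch {
  static Scan amountsOver(BigDecimal threshold) {
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("amount"),
        CompareFilter.CompareOp.GREATER, new BigDecimalComparator(threshold));
    filter.setFilterIfMissing(true); // skip rows that lack the column entirely
    Scan scan = new Scan();
    scan.setFilter(filter);
    return scan;
  }
}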

[40/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
index aa5ad0d..30d80c0 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
@@ -32,619 +32,617 @@
 024import java.util.PriorityQueue;
 025
 026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.CellComparatorImpl;
-028import 
org.apache.hadoop.hbase.CellUtil;
-029import 
org.apache.hadoop.hbase.PrivateCellUtil;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
-033import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair;
-036import 
org.apache.hadoop.hbase.util.Bytes;
-037import 
org.apache.hadoop.hbase.util.Pair;
-038import 
org.apache.hadoop.hbase.util.UnsafeAccess;
-039import 
org.apache.hadoop.hbase.util.UnsafeAvailChecker;
-040
-041import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-042
-043/**
-044 * This is optimized version of a 
standard FuzzyRowFilter Filters data based on fuzzy row key.
-045 * Performs fast-forwards during 
scanning. It takes pairs (row key, fuzzy info) to match row keys.
-046 * Where fuzzy info is a byte array with 
0 or 1 as its values:
-047 * <ul>
-048 * <li>0 - means that this byte in provided row key is fixed, i.e. row key's byte at same position
-049 * must match</li>
-050 * <li>1 - means that this byte in provided row key is NOT fixed, i.e. row key's byte at this
-051 * position can be different from the one in provided row key</li>
-052 * </ul>
-053 * Example: Let's assume row key format is userId_actionId_year_month. Length of userId is fixed and
-054 * is 4, length of actionId is 2 and year and month are 4 and 2 bytes long respectively. Let's
-055 * assume that we need to fetch all users that performed certain action (encoded as "99") in Jan of
-056 * any year. Then the pair (row key, fuzzy info) would be the following: row key = "????_99_????_01"
-057 * (one can use any value instead of "?") fuzzy info =
-058 * "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00" I.e. fuzzy info tells the matching
-059 * mask is "????_99_????_01", where at ? can be any value.
-060 */
-061@InterfaceAudience.Public
-062public class FuzzyRowFilter extends FilterBase {
-063  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-064  private List<Pair<byte[], byte[]>> fuzzyKeysData;
-065  private boolean done = false;
-066
-067  /**
-068   * The index of a last successfully found matching fuzzy string (in fuzzyKeysData). We will start
-069   * matching next KV with this one. If they do not match then we will return back to the one-by-one
-070   * iteration over fuzzyKeysData.
-071   */
-072  private int lastFoundIndex = -1;
-073
-074  /**
-075   * Row tracker (keeps all next rows after SEEK_NEXT_USING_HINT was returned)
-076   */
-077  private RowTracker tracker;
-078
-079  public FuzzyRowFilter(List<Pair<byte[], byte[]>> fuzzyKeysData) {
-080    List<Pair<byte[], byte[]>> fuzzyKeyDataCopy = new ArrayList<>(fuzzyKeysData.size());
-081
-082    for (Pair<byte[], byte[]> aFuzzyKeysData : fuzzyKeysData) {
-083      if (aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) {
-084        Pair<String, String> readable =
-085          new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), Bytes.toStringBinary(aFuzzyKeysData.getSecond()));
-086        throw new IllegalArgumentException("Fuzzy pair lengths do not match: " + readable);
-087      }
-088
-089      Pair<byte[], byte[]> p = new Pair<>();
-090      // create a copy of pair bytes so that they are not modified by the filter.
-091      p.setFirst(Arrays.copyOf(aFuzzyKeysData.getFirst(), aFuzzyKeysData.getFirst().length));
-092      p.setSecond(Arrays.copyOf(aFuzzyKeysData.getSecond(), aFuzzyKeysData.getSecond().length));
-093
-094      // update mask ( 0 -> -1 (0xff), 1 -> 2)
-095      p.setSecond(preprocessMask(p.getSecond()));
-096      preprocessSearchKey(p);
-097
-098      fuzzyKeyDataCopy.add(p);
-099    }
-100    this.fuzzyKeysData = fuzzyKeyDataCopy;
-101    this.tracker = new RowTracker();
-102  }
+027import org.apache.hadoop.hbase.CellComparator;
+028import org.apache.hadoop.hbase.PrivateCellUtil;
+029import org.apach
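
A runnable sketch of the javadoc's own example (any user, action "99", January
of any year), assuming the userId(4)_actionId(2)_year(4)_month(2) key layout:

import java.util.Arrays;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

class FuzzyScanSketch {
  static Scan januaryAction99() {
    byte[] rowKey = Bytes.toBytes("????_99_????_01"); // '?' bytes are placeholders
    byte[] fuzzyInfo = {1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0}; // 1 = wildcard, 0 = fixed
    Scan scan = new Scan();
    scan.setFilter(new FuzzyRowFilter(Arrays.asList(new Pair<>(rowKey, fuzzyInfo))));
    return scan;
  }
}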

[02/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
index 83e48f3..157463c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
@@ -33,1142 +33,1141 @@
 025import 
org.apache.hadoop.hbase.ByteBufferCell;
 026import org.apache.hadoop.hbase.Cell;
 027import 
org.apache.hadoop.hbase.CellComparator;
-028import 
org.apache.hadoop.hbase.CellComparatorImpl;
-029import 
org.apache.hadoop.hbase.CellUtil;
-030import 
org.apache.hadoop.hbase.ExtendedCell;
-031import 
org.apache.hadoop.hbase.HConstants;
-032import 
org.apache.hadoop.hbase.PrivateCellUtil;
-033import 
org.apache.hadoop.hbase.KeyValue;
-034import 
org.apache.hadoop.hbase.KeyValue.Type;
-035import 
org.apache.hadoop.hbase.KeyValueUtil;
-036import 
org.apache.yetus.audience.InterfaceAudience;
-037import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-038import 
org.apache.hadoop.hbase.io.util.LRUDictionary;
-039import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-040import 
org.apache.hadoop.hbase.nio.ByteBuff;
-041import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-042import 
org.apache.hadoop.hbase.util.Bytes;
-043import 
org.apache.hadoop.hbase.util.ClassSize;
-044import 
org.apache.hadoop.hbase.util.ObjectIntPair;
-045import 
org.apache.hadoop.io.WritableUtils;
-046
-047/**
-048 * Base class for all data block encoders 
that use a buffer.
-049 */
-050@InterfaceAudience.Private
-051abstract class BufferedDataBlockEncoder 
extends AbstractDataBlockEncoder {
-052  /**
-053   * TODO: This datablockencoder is 
dealing in internals of hfileblocks. Purge reference to HFBs
-054   */
-055  private static int 
INITIAL_KEY_BUFFER_SIZE = 512;
-056
-057  @Override
-058  public ByteBuffer 
decodeKeyValues(DataInputStream source,
-059  HFileBlockDecodingContext 
blkDecodingCtx) throws IOException {
-060if (blkDecodingCtx.getClass() != 
HFileBlockDefaultDecodingContext.class) {
-061  throw new 
IOException(this.getClass().getName() + " only accepts "
-062  + 
HFileBlockDefaultDecodingContext.class.getName() + " as the decoding 
context.");
-063}
-064
-065HFileBlockDefaultDecodingContext 
decodingCtx =
-066
(HFileBlockDefaultDecodingContext) blkDecodingCtx;
-067if 
(decodingCtx.getHFileContext().isIncludesTags()
-068&& 
decodingCtx.getHFileContext().isCompressTags()) {
-069  if 
(decodingCtx.getTagCompressionContext() != null) {
-070// It will be overhead to create 
the TagCompressionContext again and again for every block
-071// decoding.
-072
decodingCtx.getTagCompressionContext().clear();
-073  } else {
-074try {
-075  TagCompressionContext 
tagCompressionContext = new TagCompressionContext(
-076  LRUDictionary.class, 
Byte.MAX_VALUE);
-077  
decodingCtx.setTagCompressionContext(tagCompressionContext);
-078} catch (Exception e) {
-079  throw new IOException("Failed 
to initialize TagCompressionContext", e);
-080}
-081  }
-082}
-083return 
internalDecodeKeyValues(source, 0, 0, decodingCtx);
-084  }
-085
-086  /* common prefixes 
*/
-087  // Having this as static is fine but if 
META is having DBE then we should
-088  // change this.
-089  public static int 
compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {
-090return 
Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,
-091left.getRowLength() - 
rowCommonPrefix, right.getRowArray(), right.getRowOffset()
-092+ rowCommonPrefix, 
right.getRowLength() - rowCommonPrefix);
-093  }
-094
-095  public static int 
compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {
-096return 
Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + 
familyCommonPrefix,
-097left.getFamilyLength() - 
familyCommonPrefix, right.getFamilyArray(),
-098right.getFamilyOffset() + 
familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);
-099  }
-100
-101  public static int 
compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {
-102return 
Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + 
qualCommonPrefix,
-103left.getQualifierLength() - 
qualCommonPrefix, right.getQualifierArray(),
-104right.getQualifierOffset() + 
qualCommonPrefix, right.getQualifierLength()
-105- qualCommonPrefix);
-106  }
-107
-108  protected static class SeekerState {
-109    protected ByteBuf
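
The compareCommon*Prefix helpers above assume the caller already knows how many
leading bytes two cells share; a sketch of that contract on plain byte arrays:

import org.apache.hadoop.hbase.util.Bytes;

class CommonPrefixSketch {
  // Once the first `common` bytes are known equal, compare only the suffixes.
  static int compareSuffixes(byte[] left, byte[] right, int common) {
    return Bytes.compareTo(left, common, left.length - common,
        right, common, right.length - common);
  }
}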

[43/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
index c638897..e8675ce 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
@@ -146,7 +146,7 @@
 138   */
 139  public List 
getDeadServerNames() {
 140if (deadServers == null) {
-141  return Collections.EMPTY_LIST;
+141  return Collections.emptyList();
 142}
 143return 
Collections.unmodifiableList(deadServers);
 144  }
@@ -264,7 +264,7 @@
 256
 257  public Collection 
getServers() {
 258if (liveServers == null) {
-259  return Collections.EMPTY_LIST;
+259  return Collections.emptyList();
 260}
 261return 
Collections.unmodifiableCollection(this.liveServers.keySet());
 262  }
@@ -289,7 +289,7 @@
 281   */
 282  public List 
getBackupMasters() {
 283if (backupMasters == null) {
-284  return Collections.EMPTY_LIST;
+284  return Collections.emptyList();
 285}
 286return 
Collections.unmodifiableList(this.backupMasters);
 287  }
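
The Collections.emptyList() change above is more than style; a small sketch of
the difference:

import java.util.Collections;
import java.util.List;

class EmptyListSketch {
  @SuppressWarnings("unchecked")
  static List<String> raw() {
    return Collections.EMPTY_LIST;  // raw-type constant: unchecked conversion
  }

  static List<String> typed() {
    return Collections.emptyList(); // generic factory: infers List<String>
  }
}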



[42/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
index 396e574..c28f3c1 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
@@ -43,978 +43,977 @@
 035
 036import org.apache.hadoop.hbase.Cell;
 037import 
org.apache.hadoop.hbase.CellComparator;
-038import 
org.apache.hadoop.hbase.CellComparatorImpl;
-039import 
org.apache.hadoop.hbase.CellScannable;
-040import 
org.apache.hadoop.hbase.CellScanner;
-041import 
org.apache.hadoop.hbase.CellUtil;
-042import 
org.apache.hadoop.hbase.HConstants;
-043import 
org.apache.hadoop.hbase.PrivateCellUtil;
-044import 
org.apache.hadoop.hbase.KeyValue;
-045import 
org.apache.hadoop.hbase.KeyValueUtil;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047import 
org.apache.hadoop.hbase.util.Bytes;
-048
-049/**
-050 * Single row result of a {@link Get} or {@link Scan} query.
-051 *
-052 * This class is NOT THREAD SAFE.
-053 *
-054 * Convenience methods are available that return various {@link Map}
-055 * structures and values directly.
-056 *
-057 * To get a complete mapping of all cells in the Result, which can include
-058 * multiple families and multiple versions, use {@link #getMap()}.
-059 *
-060 * To get a mapping of each family to its columns (qualifiers and values),
-061 * including only the latest version of each, use {@link #getNoVersionMap()}.
-062 *
-063 * To get a mapping of qualifiers to latest values for an individual family use
-064 * {@link #getFamilyMap(byte[])}.
-065 *
-066 * To get the latest value for a specific family and qualifier use
-067 * {@link #getValue(byte[], byte[])}.
-068 *
-069 * A Result is backed by an array of {@link Cell} objects, each representing
-070 * an HBase cell defined by the row, family, qualifier, timestamp, and value.
-071 *
-072 * The underlying {@link Cell} objects can be accessed through the method {@link #listCells()}.
-073 * This will create a List from the internal Cell []. Better is to exploit the fact that
-074 * a new Result instance is a primed {@link CellScanner}; just call {@link #advance()} and
-075 * {@link #current()} to iterate over Cells as you would any {@link CellScanner}.
-076 * Call {@link #cellScanner()} to reset should you need to iterate the same Result over again
-077 * ({@link CellScanner}s are one-shot).
-078 *
-079 * If you need to overwrite a Result with another Result instance -- as in the old 'mapred'
-080 * RecordReader next invocations -- then create an empty Result with the null constructor and
-081 * in then use {@link #copyFrom(Result)}
-082 */
-083@InterfaceAudience.Public
-084public class Result implements CellScannable, CellScanner {
-085  private Cell[] cells;
-086  private Boolean exists; // if the query was just to check existence.
-087  private boolean stale = false;
-088
-089  /**
-090   * See {@link #mayHaveMoreCellsInRow()}.
-091   */
-092  private boolean mayHaveMoreCellsInRow = false;
-093  // We're not using java serialization.  Transient here is just a marker to say
-094  // that this is where we cache row if we're ever asked for it.
-095  private transient byte [] row = null;
-096  // Ditto for familyMap.  It can be composed on fly from passed in kvs.
-097  private transient NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>
-098      familyMap = null;
-099
-100  private static ThreadLocal<byte[]> localBuffer = new ThreadLocal<>();
-101  private static final int PAD_WIDTH = 128;
-102  public static final Result EMPTY_RESULT = new Result(true);
-103
-104  private final static int INITIAL_CELLSCANNER_INDEX = -1;
-105
-106  /**
-107   * Index for where we are when Result is acting as a {@link CellScanner}.
-108   */
-109  private int cellScannerIndex = INITIAL_CELLSCANNER_INDEX;
-110  private RegionLoadStats stats;
-111
-112  private final boolean readonly;
-113
-114  private Cursor cursor = null;
-115
-116  /**
-117   * Creates an empty Result w/ no KeyValue payload; returns null if you call {@link #rawCells()}.
-118   * Use this to represent no results if {@code null} won't do or in old 'mapred' as opposed
-119   * to 'mapreduce' package MapReduce where you need to overwrite a Result instance with a
-120   * {@link #copyFrom(Result)} call.
-121   */
-122  public Result() {
-123    this(false);
-124  }
-125
-126  /**
-127   * Allows to construct special purpose immutable Result objects,
-128   * such as EMPTY_RESULT.
-129   * @param readonly whether this Result instance is readonly
-130   */
-131  private Result(boolean readonly) {
-132    this.readonly = readonly;
-133  }
-134
-135  /**
-136   * Instantiate a Result with the
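
A sketch of the reuse pattern the javadoc describes for the old 'mapred' API,
overwriting one preallocated Result per record:

import org.apache.hadoop.hbase.client.Result;

class ResultReuseSketch {
  private final Result reusable = new Result(); // empty shell; rawCells() is null until filled

  void onNextRecord(Result fresh) {
    reusable.copyFrom(fresh); // overwrite in place instead of allocating per row
    // ... process 'reusable' here ...
  }
}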


[44/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html 
b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
index b492d9f..435b2b3 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
@@ -901,599 +901,608 @@
 893   * {KeyValue.Type#DeleteFamily} 
or a
 894   * {@link 
KeyValue.Type#DeleteColumn} KeyValue type.
 895   */
-896  public static boolean isDelete(final 
Cell cell) {
-897return 
PrivateCellUtil.isDelete(cell.getTypeByte());
-898  }
-899
-900  /**
-901   * @return True if a delete type, a 
{@link KeyValue.Type#Delete} or a
-902   * {KeyValue.Type#DeleteFamily} 
or a
-903   * {@link 
KeyValue.Type#DeleteColumn} KeyValue type.
-904   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-905   */
-906  @Deprecated
-907  public static boolean isDelete(final 
byte type) {
-908return Type.Delete.getCode() <= 
type
-909&& type <= 
Type.DeleteFamily.getCode();
-910  }
-911
-912  /**
-913   * @return True if this cell is a 
{@link KeyValue.Type#Delete} type.
-914   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-915   */
-916  @Deprecated
-917  public static boolean isDeleteType(Cell 
cell) {
-918return cell.getTypeByte() == 
Type.Delete.getCode();
-919  }
-920
-921  /**
-922   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0. 
-923   */
-924  @Deprecated
-925  public static boolean 
isDeleteFamily(final Cell cell) {
-926return cell.getTypeByte() == 
Type.DeleteFamily.getCode();
-927  }
-928
-929  /**
-930   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0. 
-931   */
-932  @Deprecated
-933  public static boolean 
isDeleteFamilyVersion(final Cell cell) {
-934return cell.getTypeByte() == 
Type.DeleteFamilyVersion.getCode();
-935  }
-936
-937  /**
-938   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0. 
-939   */
-940  @Deprecated
-941  public static boolean 
isDeleteColumns(final Cell cell) {
-942return cell.getTypeByte() == 
Type.DeleteColumn.getCode();
-943  }
-944
-945  /**
-946   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0. 
-947   */
-948  @Deprecated
-949  public static boolean 
isDeleteColumnVersion(final Cell cell) {
-950return cell.getTypeByte() == 
Type.Delete.getCode();
-951  }
-952
-953  /**
-954   *
-955   * @return True if this cell is a 
delete family or column type.
-956   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-957   */
-958  @Deprecated
-959  public static boolean 
isDeleteColumnOrFamily(Cell cell) {
-960int t = cell.getTypeByte();
-961return t == 
Type.DeleteColumn.getCode() || t == Type.DeleteFamily.getCode();
-962  }
-963
-964  /**
-965   * Estimate based on keyvalue's 
serialization format in the RPC layer. Note that there is an extra
-966   * SIZEOF_INT added to the size here 
that indicates the actual length of the cell for cases where
-967   * cell's are serialized in a 
contiguous format (For eg in RPCs).
-968   * @param cell
-969   * @return Estimate of the 
cell size in bytes plus an extra SIZEOF_INT indicating 
the
-970   * actual cell length.
-971   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-972   */
-973  @Deprecated
-974  public static int 
estimatedSerializedSizeOf(final Cell cell) {
-975return 
PrivateCellUtil.estimatedSerializedSizeOf(cell);
-976  }
-977
-978  /**
-979   * Calculates the serialized key size. 
We always serialize in the KeyValue's serialization
-980   * format.
-981   * @param cell the cell for which the 
key size has to be calculated.
-982   * @return the key size
-983   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-984   */
-985  @Deprecated
-986  public static int 
estimatedSerializedSizeOfKey(final Cell cell) {
-987return 
PrivateCellUtil.estimatedSerializedSizeOfKey(cell);
-988  }
-989
-990  /**
-991   * This is an estimate of the heap 
space occupied by a cell. When the cell is of type
-992   * {@link HeapSize} we call {@link 
HeapSize#heapSize()} so cell can give a correct value. In other
-993   * cases we just consider the bytes 
occupied by the cell components ie. row, CF, qualifier,
-994   * timestamp, type, value and tags.
-995   * @param cell
-996   * @return estimate of the heap space
-997   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-998   */
-999  @Deprecated
-1000  public static long 
estimatedHeapSizeOf(final Cell cell) {
-1001return 
PrivateCellUtil.estimatedHeapSizeOf(cell);
-1002  }
-1003
-1004  /* tags 
*/
-1005  /**
-1006   * Util met
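
A sketch of the surviving Cell-based entry point; the deprecated type-byte
overloads above now delegate to PrivateCellUtil:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;

class DeleteCheckSketch {
  // True for any delete marker type (Delete, DeleteColumn, DeleteFamily, ...).
  static boolean isDeleteMarker(Cell cell) {
    return CellUtil.isDelete(cell);
  }
}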

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
index aa5ad0d..30d80c0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
@@ -32,619 +32,617 @@
 024import java.util.PriorityQueue;
 025
 026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.CellComparatorImpl;
-028import 
org.apache.hadoop.hbase.CellUtil;
-029import 
org.apache.hadoop.hbase.PrivateCellUtil;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
-033import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair;
-036import 
org.apache.hadoop.hbase.util.Bytes;
-037import 
org.apache.hadoop.hbase.util.Pair;
-038import 
org.apache.hadoop.hbase.util.UnsafeAccess;
-039import 
org.apache.hadoop.hbase.util.UnsafeAvailChecker;
-040
-041import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-042
-043/**
-044 * This is optimized version of a 
standard FuzzyRowFilter Filters data based on fuzzy row key.
-045 * Performs fast-forwards during 
scanning. It takes pairs (row key, fuzzy info) to match row keys.
-046 * Where fuzzy info is a byte array with 
0 or 1 as its values:
-047 * <ul>
-048 * <li>0 - means that this byte in provided row key is fixed, i.e. row key's byte at same position
-049 * must match</li>
-050 * <li>1 - means that this byte in provided row key is NOT fixed, i.e. row key's byte at this
-051 * position can be different from the one in provided row key</li>
-052 * </ul>
-053 * Example: Let's assume row key format is userId_actionId_year_month. Length of userId is fixed and
-054 * is 4, length of actionId is 2 and year and month are 4 and 2 bytes long respectively. Let's
-055 * assume that we need to fetch all users that performed certain action (encoded as "99") in Jan of
-056 * any year. Then the pair (row key, fuzzy info) would be the following: row key = "????_99_????_01"
-057 * (one can use any value instead of "?") fuzzy info =
-058 * "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00" I.e. fuzzy info tells the matching
-059 * mask is "????_99_????_01", where at ? can be any value.
-060 */
-061@InterfaceAudience.Public
-062public class FuzzyRowFilter extends FilterBase {
-063  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-064  private List<Pair<byte[], byte[]>> fuzzyKeysData;
-065  private boolean done = false;
-066
-067  /**
-068   * The index of a last successfully found matching fuzzy string (in fuzzyKeysData). We will start
-069   * matching next KV with this one. If they do not match then we will return back to the one-by-one
-070   * iteration over fuzzyKeysData.
-071   */
-072  private int lastFoundIndex = -1;
-073
-074  /**
-075   * Row tracker (keeps all next rows after SEEK_NEXT_USING_HINT was returned)
-076   */
-077  private RowTracker tracker;
-078
-079  public FuzzyRowFilter(List<Pair<byte[], byte[]>> fuzzyKeysData) {
-080    List<Pair<byte[], byte[]>> fuzzyKeyDataCopy = new ArrayList<>(fuzzyKeysData.size());
-081
-082    for (Pair<byte[], byte[]> aFuzzyKeysData : fuzzyKeysData) {
-083      if (aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) {
-084        Pair<String, String> readable =
-085          new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), Bytes.toStringBinary(aFuzzyKeysData.getSecond()));
-086        throw new IllegalArgumentException("Fuzzy pair lengths do not match: " + readable);
-087      }
-088
-089      Pair<byte[], byte[]> p = new Pair<>();
-090      // create a copy of pair bytes so that they are not modified by the filter.
-091      p.setFirst(Arrays.copyOf(aFuzzyKeysData.getFirst(), aFuzzyKeysData.getFirst().length));
-092      p.setSecond(Arrays.copyOf(aFuzzyKeysData.getSecond(), aFuzzyKeysData.getSecond().length));
-093
-094      // update mask ( 0 -> -1 (0xff), 1 -> 2)
-095      p.setSecond(preprocessMask(p.getSecond()));
-096      preprocessSearchKey(p);
-097
-098      fuzzyKeyDataCopy.add(p);
-099    }
-100    this.fuzzyKeysData = fuzzyKeyDataCopy;
-101    this.tracker = new RowTracker();
-102  }
+027import org.apache.hadoop.hbase.CellComparator;
+028import org.apache.hadoop.hbase.PrivateCellUtil;
+029import org.apache.yetus.au
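
A hedged sketch of the mask rewrite noted in the constructor above ("0 -> -1
(0xff), 1 -> 2"); the real preprocessMask also depends on unaligned-unsafe
support, which this sketch ignores:

class MaskPreprocessSketch {
  // Fixed positions become all-ones masks, wildcard positions a sentinel value.
  static byte[] preprocess(byte[] mask) {
    byte[] out = new byte[mask.length];
    for (int i = 0; i < mask.length; i++) {
      out[i] = (mask[i] == 0) ? (byte) -1 : (byte) 2;
    }
    return out;
  }
}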

[21/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
 
b/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
index 1a40d8f..ef2120e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class PartitionedMobCompactor
+public class PartitionedMobCompactor
 extends MobCompactor
 An implementation of MobCompactor that 
compacts the mob files in partitions.
 
@@ -398,7 +398,7 @@ extends 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -407,7 +407,7 @@ extends 
 
 mergeableSize
-protected long mergeableSize
+protected long mergeableSize
 
 
 
@@ -416,7 +416,7 @@ extends 
 
 delFileMaxCount
-protected int delFileMaxCount
+protected int delFileMaxCount
 
 
 
@@ -425,7 +425,7 @@ extends 
 
 compactionBatchSize
-protected int compactionBatchSize
+protected int compactionBatchSize
 The number of files compacted in a batch
 
 
@@ -435,7 +435,7 @@ extends 
 
 compactionKVMax
-protected int compactionKVMax
+protected int compactionKVMax
 
 
 
@@ -444,7 +444,7 @@ extends 
 
 tempPath
-private final org.apache.hadoop.fs.Path tempPath
+private final org.apache.hadoop.fs.Path tempPath
 
 
 
@@ -453,7 +453,7 @@ extends 
 
 bulkloadPath
-private final org.apache.hadoop.fs.Path bulkloadPath
+private final org.apache.hadoop.fs.Path bulkloadPath
 
 
 
@@ -462,7 +462,7 @@ extends 
 
 compactionCacheConfig
-private final CacheConfig compactionCacheConfig
+private final CacheConfig compactionCacheConfig
 
 
 
@@ -471,7 +471,7 @@ extends 
 
 refCellTags
-private final byte[] refCellTags
+private final byte[] refCellTags
 
 
 
@@ -480,7 +480,7 @@ extends 
 
 cryptoContext
-private Encryption.Context cryptoContext
+private Encryption.Context cryptoContext
 
 
 
@@ -497,7 +497,7 @@ extends 
 
 PartitionedMobCompactor
-public PartitionedMobCompactor(org.apache.hadoop.conf.Configuration conf,
+public PartitionedMobCompactor(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.fs.FileSystem fs,
TableName tableName,
ColumnFamilyDescriptor column,
@@ -523,7 +523,7 @@ extends 
 
 compact
-public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List compact(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List files,
+public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List compact(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List files,
boolean allFiles)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
class: MobCompactor
@@ -547,7 +547,7 @@ extends 
 
 select
-protected PartitionedMobCompactionRequest select(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List candidates,
+protected PartitionedMobCompactionRequest select(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List candidates,
  boolean allFiles)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Selects the compacted mob/del files.
@@ -569,7 +569,7 @@ extends 
 
 performCompaction
-protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List performCompaction(PartitionedMobCompactionRequest request)
+protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List performCompaction(PartitionedMobCompactionRequest request)
  throws http://docs.

[19/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileReader.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileReader.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileReader.html
index b922db2..974ee48 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileReader.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileReader.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value="Phoenix")
  @InterfaceStability.Evolving
-public class StoreFileReader
+public class StoreFileReader
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 Reader for a StoreFile.
 
@@ -514,7 +514,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -523,7 +523,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 generalBloomFilter
-protected BloomFilter generalBloomFilter
+protected BloomFilter generalBloomFilter
 
 
 
@@ -532,7 +532,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 deleteFamilyBloomFilter
-protected BloomFilter deleteFamilyBloomFilter
+protected BloomFilter deleteFamilyBloomFilter
 
 
 
@@ -541,7 +541,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 bloomFilterType
-protected BloomType bloomFilterType
+protected BloomType bloomFilterType
 
 
 
@@ -550,7 +550,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 reader
-private final HFile.Reader reader
+private final HFile.Reader reader
 
 
 
@@ -559,7 +559,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 sequenceID
-protected long sequenceID
+protected long sequenceID
 
 
 
@@ -568,7 +568,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 timeRange
-protected TimeRange timeRange
+protected TimeRange timeRange
 
 
 
@@ -577,7 +577,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 lastBloomKey
-private byte[] lastBloomKey
+private byte[] lastBloomKey
 
 
 
@@ -586,7 +586,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 deleteFamilyCnt
-private long deleteFamilyCnt
+private long deleteFamilyCnt
 
 
 
@@ -595,7 +595,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 bulkLoadResult
-private boolean bulkLoadResult
+private boolean bulkLoadResult
 
 
 
@@ -604,7 +604,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 lastBloomKeyOnlyKV
-private KeyValue.KeyOnlyKeyValue lastBloomKeyOnlyKV
+private KeyValue.KeyOnlyKeyValue lastBloomKeyOnlyKV
 
 
 
@@ -613,7 +613,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 skipResetSeqId
-private boolean skipResetSeqId
+private boolean skipResetSeqId
 
 
 
@@ -622,7 +622,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 refCount
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger refCount
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger refCount
 
 
 
@@ -631,7 +631,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 shared
-final boolean shared
+final boolean shared
 
 
 
@@ -648,7 +648,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 StoreFileReader
-private StoreFileReader(HFile.Reader reader,
+private StoreFileReader(HFile.Reader reader,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger refCount,
 boolean shared)
 
@@ -659,7 +659,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 StoreFileReader
-public StoreFileReader(org.apache.hadoop.fs.FileSystem fs,
+public StoreFileReader(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path path,
CacheConfig cacheConf,
boolean primaryReplicaStoreFile,
@@ -679,7 +679,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 StoreFileReader
-public StoreFileReader(org.apache.hadoop.fs.FileSystem fs,
+public StoreFileReader(org.apache.hadoop.fs.FileSystem fs,
 

[04/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
index 83e48f3..157463c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
@@ -33,1142 +33,1141 @@
 025import 
org.apache.hadoop.hbase.ByteBufferCell;
 026import org.apache.hadoop.hbase.Cell;
 027import 
org.apache.hadoop.hbase.CellComparator;
-028import 
org.apache.hadoop.hbase.CellComparatorImpl;
-029import 
org.apache.hadoop.hbase.CellUtil;
-030import 
org.apache.hadoop.hbase.ExtendedCell;
-031import 
org.apache.hadoop.hbase.HConstants;
-032import 
org.apache.hadoop.hbase.PrivateCellUtil;
-033import 
org.apache.hadoop.hbase.KeyValue;
-034import 
org.apache.hadoop.hbase.KeyValue.Type;
-035import 
org.apache.hadoop.hbase.KeyValueUtil;
-036import 
org.apache.yetus.audience.InterfaceAudience;
-037import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-038import 
org.apache.hadoop.hbase.io.util.LRUDictionary;
-039import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-040import 
org.apache.hadoop.hbase.nio.ByteBuff;
-041import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-042import 
org.apache.hadoop.hbase.util.Bytes;
-043import 
org.apache.hadoop.hbase.util.ClassSize;
-044import 
org.apache.hadoop.hbase.util.ObjectIntPair;
-045import 
org.apache.hadoop.io.WritableUtils;
-046
-047/**
-048 * Base class for all data block encoders 
that use a buffer.
-049 */
-050@InterfaceAudience.Private
-051abstract class BufferedDataBlockEncoder 
extends AbstractDataBlockEncoder {
-052  /**
-053   * TODO: This datablockencoder is 
dealing in internals of hfileblocks. Purge reference to HFBs
-054   */
-055  private static int 
INITIAL_KEY_BUFFER_SIZE = 512;
-056
-057  @Override
-058  public ByteBuffer 
decodeKeyValues(DataInputStream source,
-059  HFileBlockDecodingContext 
blkDecodingCtx) throws IOException {
-060if (blkDecodingCtx.getClass() != 
HFileBlockDefaultDecodingContext.class) {
-061  throw new 
IOException(this.getClass().getName() + " only accepts "
-062  + 
HFileBlockDefaultDecodingContext.class.getName() + " as the decoding 
context.");
-063}
-064
-065HFileBlockDefaultDecodingContext 
decodingCtx =
-066
(HFileBlockDefaultDecodingContext) blkDecodingCtx;
-067if 
(decodingCtx.getHFileContext().isIncludesTags()
-068&& 
decodingCtx.getHFileContext().isCompressTags()) {
-069  if 
(decodingCtx.getTagCompressionContext() != null) {
-070// It will be overhead to create 
the TagCompressionContext again and again for every block
-071// decoding.
-072
decodingCtx.getTagCompressionContext().clear();
-073  } else {
-074try {
-075  TagCompressionContext 
tagCompressionContext = new TagCompressionContext(
-076  LRUDictionary.class, 
Byte.MAX_VALUE);
-077  
decodingCtx.setTagCompressionContext(tagCompressionContext);
-078} catch (Exception e) {
-079  throw new IOException("Failed 
to initialize TagCompressionContext", e);
-080}
-081  }
-082}
-083return 
internalDecodeKeyValues(source, 0, 0, decodingCtx);
-084  }
-085
-086  /* common prefixes 
*/
-087  // Having this as static is fine but if 
META is having DBE then we should
-088  // change this.
-089  public static int 
compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {
-090return 
Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,
-091left.getRowLength() - 
rowCommonPrefix, right.getRowArray(), right.getRowOffset()
-092+ rowCommonPrefix, 
right.getRowLength() - rowCommonPrefix);
-093  }
-094
-095  public static int 
compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {
-096return 
Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + 
familyCommonPrefix,
-097left.getFamilyLength() - 
familyCommonPrefix, right.getFamilyArray(),
-098right.getFamilyOffset() + 
familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);
-099  }
-100
-101  public static int 
compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {
-102return 
Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + 
qualCommonPrefix,
-103left.getQualifierLength() - 
qualCommonPrefix, right.getQualifierArray(),
-104right.getQualifierOffset() + 
qualCommonPrefix, right.getQualifierLength()
-105- qualCommon

[27/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
index fc24ce1..90ae7c6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
@@ -117,7 +117,7 @@
 protected static class BufferedDataBlockEncoder.SeekerState
 extends Object

(All hunks in this page are anchor-only refreshes of the generated Javadoc; no
signatures change. They cover the fields currentBuffer, tagCompressionContext,
valueOffset, keyLength, valueLength, lastCommonPrefix, tagsLength, tagsOffset,
tagsCompressedLength, uncompressTags, keyBuffer ("We need to store a copy of
the key."), tagsBuffer, memstoreTS, nextKvOffset, currentKey, tmpPair and
includeTags; the constructor SeekerState(ObjectIntPair<ByteBuffer> tmpPair,
boolean includeTags); and the method isValid.)

[20/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index d4d4e08..c8e3361 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -118,7 +118,7 @@
 @InterfaceAudience.Private
 public class HStore
 extends Object
 implements Store, HeapSize, StoreConfigInformation, PropagatingConfigurationObserver

 A Store holds a column family in a Region. It's a memstore and a set of zero

(All hunks in this page are anchor-only refreshes; no signatures change. They
cover the constants MEMSTORE_CLASS_NAME,
COMPACTCHECKER_INTERVAL_MULTIPLIER_KEY, BLOCKING_STOREFILES_KEY,
BLOCK_STORAGE_POLICY_KEY, DEFAULT_BLOCK_STORAGE_POLICY,
DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER and
DEFAULT_BLOCKING_STOREFILE_COUNT, and the fields LOG, memstore, region,
family, fs, conf, cacheConf, lastCompactSize, forceMajor and
closeCheckInterval.)
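HStore's blocking-store-file constants above are the knobs operators most
often touch. A hedged sketch of setting one programmatically; the literal key
string is an assumption based on the usual HBase naming, so verify it against
the constant field values page:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;

  Configuration conf = HBaseConfiguration.create();
  // BLOCKING_STOREFILES_KEY: writes to a region stall once one of its stores
  // accumulates this many store files (key literal assumed, see note above):
  conf.setInt("hbase.hstore.blockingStoreFiles", 16);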

[07/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
index 83e48f3..157463c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
@@ -33,1142 +33,1141 @@
 import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.ExtendedCell;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.io.TagCompressionContext;
-import org.apache.hadoop.hbase.io.util.LRUDictionary;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
-import org.apache.hadoop.hbase.nio.ByteBuff;
-import org.apache.hadoop.hbase.util.ByteBufferUtils;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.ObjectIntPair;
-import org.apache.hadoop.io.WritableUtils;
-
-/**
- * Base class for all data block encoders that use a buffer.
- */
-@InterfaceAudience.Private
-abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
-  /**
-   * TODO: This datablockencoder is dealing in internals of hfileblocks. Purge reference to HFBs
-   */
-  private static int INITIAL_KEY_BUFFER_SIZE = 512;
-
-  @Override
-  public ByteBuffer decodeKeyValues(DataInputStream source,
-      HFileBlockDecodingContext blkDecodingCtx) throws IOException {
-    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
-      throw new IOException(this.getClass().getName() + " only accepts "
-          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
-    }
-    HFileBlockDefaultDecodingContext decodingCtx =
-        (HFileBlockDefaultDecodingContext) blkDecodingCtx;
-    if (decodingCtx.getHFileContext().isIncludesTags()
-        && decodingCtx.getHFileContext().isCompressTags()) {
-      if (decodingCtx.getTagCompressionContext() != null) {
-        // It will be overhead to create the TagCompressionContext again and again for every block
-        // decoding.
-        decodingCtx.getTagCompressionContext().clear();
-      } else {
-        try {
-          TagCompressionContext tagCompressionContext = new TagCompressionContext(
-              LRUDictionary.class, Byte.MAX_VALUE);
-          decodingCtx.setTagCompressionContext(tagCompressionContext);
-        } catch (Exception e) {
-          throw new IOException("Failed to initialize TagCompressionContext", e);
-        }
-      }
-    }
-    return internalDecodeKeyValues(source, 0, 0, decodingCtx);
-  }
-
-  /* common prefixes */
-  // Having this as static is fine but if META is having DBE then we should
-  // change this.
-  public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {
-    return Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,
-        left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset()
-            + rowCommonPrefix, right.getRowLength() - rowCommonPrefix);
-  }
-
-  public static int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {
-    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix,
-        left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(),
-        right.getFamilyOffset() + familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);
-  }
-
-  public static int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {
-    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix,
-        left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(),
-        right.getQualifierOffset() + qualComm
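The compareCommon*Prefix helpers above assume the caller has already verified
that the first rowCommonPrefix (or family/qualifier prefix) bytes match, so
the comparison can start past the shared prefix. A minimal sketch of the idea
using the stock org.apache.hadoop.hbase.util.Bytes utility; the row values and
prefix length are made up for illustration:

  byte[] left  = Bytes.toBytes("row-0001");
  byte[] right = Bytes.toBytes("row-0002");
  int common = 7;  // "row-000" is already known to be shared
  // Compare only the unshared suffixes, as the helpers above do per component:
  int cmp = Bytes.compareTo(left, common, left.length - common,
      right, common, right.length - common);
  // cmp < 0 here, since '1' sorts before '2'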

[17/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html b/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
index a5bdfb5..5685dfa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
@@ -33,94 +33,104 @@
 /**
  * Comparator for comparing cells and has some specialized methods that allows comparing individual
  * cell components like row, family, qualifier and timestamp
  */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
+@InterfaceAudience.Public
 @InterfaceStability.Evolving
 public interface CellComparator extends Comparator {
+  /**
+   * A comparator for ordering cells in user-space tables. Useful when writing cells in sorted
+   * order as necessary for bulk import (i.e. via MapReduce)
+   *
+   * CAUTION: This comparator may provide inaccurate ordering for cells from system tables,
+   * and should not be relied upon in that case.
+   */
+  static CellComparator getInstance() {
+    return CellComparatorImpl.COMPARATOR;
+  }

   /**
    * Lexographically compares two cells. The key part of the cell is taken for comparison which
    * includes row, family, qualifier, timestamp and type
    * @param leftCell the left hand side cell
    * @param rightCell the right hand side cell
    * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
    * cells are equal
    */
   @Override
   int compare(Cell leftCell, Cell rightCell);

   /**
    * Lexographically compares the rows of two cells.
    * @param leftCell the left hand side cell
    * @param rightCell the right hand side cell
    * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
    * cells are equal
    */
   int compareRows(Cell leftCell, Cell rightCell);

   /**
    * Compares the row part of the cell with a simple plain byte[] like the
    * stopRow in Scan.
    * @param cell the cell
    * @param bytes the byte[] representing the row to be compared with
    * @param offset the offset of the byte[]
    * @param length the length of the byte[]
    * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
    * cells are equal
    */
   int compareRows(Cell cell, byte[] bytes, int offset, int length);

   /**
    * Lexographically compares the two cells excluding the row part. It compares family, qualifier,
    * timestamp and the type
    * @param leftCell the left hand side cell
    * @param rightCell the right hand side cell
    * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
    * cells are equal
    */
   int compareWithoutRow(Cell leftCell, Cell rightCell);

   /**
    * Lexographically compares the families of the two cells
    * @param leftCell the left hand side cell
    * @param rightCell the right hand side cell
    * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
    * cells are equal
    */
   int compareFamilies(Cell leftCell, Cell rightCell);

   /**
    * Lexographically compares the qualifiers of the two cells
    * @param leftCell the left hand side cell
    * @param rightCell the right hand side cell
    * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
    * cells are equal
    */
   int compareQualifiers(Cell leftCell
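With CellComparator now @InterfaceAudience.Public, client code that previously
reached for the internal CellComparatorImpl.COMPARATOR can use the factory
method instead. A small sketch; the cells here are made-up KeyValues:

  import java.util.ArrayList;
  import java.util.List;
  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.CellComparator;
  import org.apache.hadoop.hbase.KeyValue;
  import org.apache.hadoop.hbase.util.Bytes;

  List<Cell> cells = new ArrayList<>();
  cells.add(new KeyValue(Bytes.toBytes("row-2"), Bytes.toBytes("f"),
      Bytes.toBytes("q"), Bytes.toBytes("v2")));
  cells.add(new KeyValue(Bytes.toBytes("row-1"), Bytes.toBytes("f"),
      Bytes.toBytes("q"), Bytes.toBytes("v1")));
  // Sort into the order HBase itself writes cells (user-space tables only;
  // per the javadoc above, do not rely on this for system tables):
  cells.sort(CellComparator.getInstance());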


[01/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 2ef9b5f9c -> 2cef721c4


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
index 9326aab..d8d7897 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
@@ -118,7 +118,7 @@
   /** Raw key comparator class name in version 3 */
   // We could write the actual class name from 2.0 onwards and handle BC
-  private String comparatorClassName = CellComparatorImpl.COMPARATOR.getClass().getName();
+  private String comparatorClassName = CellComparator.getInstance().getClass().getName();

   /** The encryption key */
   private byte[] encryptionKey;


[18/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
index 0257a12..cb1963d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
@@ -114,7 +114,7 @@
 @InterfaceAudience.Private
 public class StoreFileWriter
 extends Object
 implements CellSink, ShipperListener

 A StoreFile writer.  Use this to read/write HBase Store Files. It is package

(All hunks in this page are anchor-only refreshes; no signatures change. They
cover the fields LOG, dash, generalBloomFilterWriter,
deleteFamilyBloomFilterWriter, bloomType, earliestPutTs, deleteFamilyCnt,
bloomContext, deleteFamilyBloomContext, timeRangeTracker and writer; the
private constructor StoreFileWriter(FileSystem fs, Path path, Configuration
conf, CacheConfig cacheConf, ...); and the methods appendMetadata(long
maxSequenceId, boolean majorCompaction) ("Writes meta data."),
appendMetadata(long maxSequenceId, boolean majorCompaction, long
mobCellsCount), appendTrackedTimestampsToMetadata() ("Add TimestampRange and
earliest put timestamp to Metadata"), trackTimestamps(Cell cell) ("Record the
earliest Put timestamp."), appendGeneralBloomfilter(Cell cell) and
appendDeleteFamilyBloomFilter(Cell cell).)
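StoreFileWriter is internal (@InterfaceAudience.Private), but the metadata
calls above show the shape of its write path. A rough sketch, assuming a
writer already built via its Builder; the cell source and sequence id are
placeholders:

  // Append cells in comparator order; append() also feeds the bloom filters
  // and the timestamp tracker seen above:
  for (Cell cell : cellsInOrder) {   // cellsInOrder: hypothetical, pre-sorted
    writer.append(cell);
  }
  // Stamp file-wide metadata before closing:
  writer.appendMetadata(maxSequenceId, /* majorCompaction = */ false);
  writer.close();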

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/BigDecimalComparator.html b/devapidocs/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
new file mode 100644
index 000..6e85941
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
@@ -0,0 +1,483 @@
+BigDecimalComparator (Apache HBase 3.0.0-SNAPSHOT API)
+
+org.apache.hadoop.hbase.filter
+Class BigDecimalComparator
+
+java.lang.Object
+  org.apache.hadoop.hbase.filter.ByteArrayComparable
+    org.apache.hadoop.hbase.filter.BigDecimalComparator
+
+All Implemented Interfaces: Comparable
+
+@InterfaceAudience.Public
+public class BigDecimalComparator
+extends ByteArrayComparable
+
+A BigDecimal comparator which numerically compares against the specified byte array
+
+Field Summary
+  private BigDecimal bigDecimal
+  Fields inherited from class org.apache.hadoop.hbase.filter.ByteArrayComparable: value
+
+Constructor Summary
+  BigDecimalComparator(BigDecimal value)
+
+Method Summary
+  (package private) boolean areSerializedFieldsEqual(BigDecimalComparator other)
+  int compareTo(byte[] value, int offset, int length)
+    Special compareTo method for subclasses, to avoid copying byte[] unnecessarily.
+  int compareTo(ByteBuffer value, int offset, int length)
+    Special compareTo method for subclasses, to avoid copying bytes unnecessarily.
+  boolean equals(Object obj)
+  int hashCode()
+  static BigDecimalComparator parseFrom(byte[] pbBytes)
+  byte[] toByteArray()
+  Methods inherited from class org.apache.hadoop.hbase.filter.ByteArrayComparable:
+    areSerializedFieldsEqual, compareTo, getValue

[26/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
index fdf22cb..a81e393 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
@@ -113,7 +113,7 @@
 public static interface HFile.Reader
 extends Closeable, HFile.CachingBlockReader

 An interface used by clients to open and iterate an HFile.

(All hunks in this page are anchor-only refreshes; no signatures change. The
methods covered:
  String getName(): "Returns this reader's 'name'. Usually the last component
    of the path. Needs to be constant as the file is being moved to support
    caching on write."
  CellComparator getComparator()
  HFileScanner getScanner(boolean cacheBlocks, boolean pread, boolean isCompaction)
  HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException
  Map loadFileInfo() throws IOException
  Optional getLastKey()
  Optional midKey() throws IOException
  long length()
  long getEntries()
  Optional getFirstKey()
  long indexSize())

[08/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
index ced79d7..71cf628 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
@@ -30,7 +30,7 @@
 import java.util.ArrayList;

 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparatorImpl;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -74,7 +74,7 @@
   public boolean filterRowKey(Cell firstRowCell) {
     // if stopRowKey is <= buffer, then true, filter row.
     if (filterAllRemaining()) return true;
-    int cmp = CellComparatorImpl.COMPARATOR.compareRows(firstRowCell, stopRowKey, 0, stopRowKey.length);
+    int cmp = CellComparator.getInstance().compareRows(firstRowCell, stopRowKey, 0, stopRowKey.length);
     done = reversed ? cmp < 0 : cmp > 0;
     return done;
   }


[23/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
index b5768f1..ab43799 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
@@ -114,7 +114,7 @@
 @InterfaceAudience.Private
 public class HFileWriterImpl
 extends Object
 implements HFile.Writer

 Common functionality needed by all versions of HFile writers.

(All hunks in this page are anchor-only refreshes; no signatures change. The
fields covered:
  LOG; UNSET;
  UNIFIED_ENCODED_BLOCKSIZE_RATIO: "if this feature is enabled, preCalculate
    encoded data size before real encoding happens";
  encodedBlockSizeLimit: "Block size limit after encoding, used to unify
    encoded block Cache entry size";
  lastCell: "The Cell previously appended. Becomes the last cell in the file.";
  outputStream: "FileSystem stream to write into.";
  closeOutputStream: "True if we opened the outputStream (and so will close it).";
  fileInfo: "A 'file info' block: a key-value map of file-wide metadata.";
  entryCount: "Total # of key/value entries, i.e. how many times add() was called.";
  totalKeyLength, totalValueLength: used for calculating the average key and
    value lengths;
  totalUncompressedBytes: "Total uncompressed bytes, maybe calculate a
    compression ratio later.";
  comparator: "Key comparator. Used to ensure we write in order.";
  metaNames: "Meta block names."; metaData: "Writables representing meta block data.";
  firstCellInBlock: "First cell in a block. This reference should be
    short-lived since we write hfiles in a burst.";
  path: "May be null if we were passed a stream.";
  cacheConf: "Cache configuration for cach

[05/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
index 83e48f3..157463c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
(Same BufferedDataBlockEncoder source diff as BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html above.)

[06/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
index 83e48f3..157463c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
(Same BufferedDataBlockEncoder source diff as BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html above.)

[49/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/apidocs/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/CellUtil.html b/apidocs/org/apache/hadoop/hbase/CellUtil.html
index 95e7014..9523443 100644
--- a/apidocs/org/apache/hadoop/hbase/CellUtil.html
+++ b/apidocs/org/apache/hadoop/hbase/CellUtil.html
@@ -161,7 +161,7 @@
 static int
-compare(org.apache.hadoop.hbase.CellComparator comparator,
+compare(CellComparator comparator,
 Cell left,
 byte[] key,
 int offset,

@@ -599,19 +599,23 @@
+static boolean
+isPut(Cell cell)
+
 static byte[]
 makeColumn(byte[] family,
   byte[] qualifier)
 Makes a column in family:qualifier form from separate byte arrays.

(The remaining hunks in this page only renumber the generated method-index
rows for matchingColumn, matchingFamily, matchingQualifier, matchingRow,
matchingRows, matchingRowColumn, matchingRowColumnBytes and matchingTimesta
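The newly indexed CellUtil.isPut(Cell) gives the public API a way to check a
cell's type without inspecting the raw type byte. A small sketch with a
made-up cell:

  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.CellUtil;
  import org.apache.hadoop.hbase.KeyValue;
  import org.apache.hadoop.hbase.util.Bytes;

  Cell cell = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
      Bytes.toBytes("q"), Bytes.toBytes("v"));   // defaults to a Put cell
  if (CellUtil.isPut(cell)) {
    // treat as an insertion rather than a delete marker
  }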

[25/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
index cb09dad..6af3ec6 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static class HFileReaderImpl.EncodedScanner
+protected static class HFileReaderImpl.EncodedScanner
 extends HFileReaderImpl.HFileScannerImpl
 Scanner that operates on encoded data blocks.
 
@@ -312,7 +312,7 @@ extends 
 
 decodingCtx
-private final HFileBlockDecodingContext decodingCtx
+private final HFileBlockDecodingContext decodingCtx
 
 
 
@@ -321,7 +321,7 @@ extends 
 
 seeker
-private final DataBlockEncoder.EncodedSeeker seeker
+private final DataBlockEncoder.EncodedSeeker seeker
 
 
 
@@ -330,7 +330,7 @@ extends 
 
 dataBlockEncoder
-private final DataBlockEncoder 
dataBlockEncoder
+private final DataBlockEncoder 
dataBlockEncoder
 
 
 
@@ -347,7 +347,7 @@ extends 
 
 EncodedScanner
-public EncodedScanner(HFile.Reader reader,
+public EncodedScanner(HFile.Reader reader,
   boolean cacheBlocks,
   boolean pread,
   boolean isCompaction,
@@ -368,7 +368,7 @@ extends 
 
 isSeeked
-public boolean isSeeked()
+public boolean isSeeked()
 
 Specified by:
 isSeeked in
 interface HFileScanner
@@ -387,7 +387,7 @@ extends 
 
 setNonSeekedState
-public void setNonSeekedState()
+public void setNonSeekedState()
 
 Overrides:
 setNonSeekedState in
 class HFileReaderImpl.HFileScannerImpl
@@ -400,7 +400,7 @@ extends 
 
 updateCurrentBlock
-protected void updateCurrentBlock(HFileBlock newBlock)
+protected void updateCurrentBlock(HFileBlock newBlock)
throws CorruptHFileException
 Updates the current block to be the given HFileBlock. 
Seeks to
  the the first key/value pair.
@@ -420,7 +420,7 @@ extends 
 
 getEncodedBuffer
-private ByteBuff getEncodedBuffer(HFileBlock newBlock)
+private ByteBuff getEncodedBuffer(HFileBlock newBlock)
 
 
 
@@ -429,7 +429,7 @@ extends 
 
 processFirstDataBlock
-protected boolean processFirstDataBlock()
+protected boolean processFirstDataBlock()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Overrides:
@@ -445,7 +445,7 @@ extends 
 
 next
-public boolean next()
+public boolean next()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
class: HFileReaderImpl.HFileScannerImpl
 Go to the next key/value in the block section. Loads the 
next block if
@@ -469,7 +469,7 @@ extends HFileReaderImpl.HFileScannerImpl

getKey
-public Cell getKey()
+public Cell getKey()
Description copied from interface: HFileScanner
Gets the current key in the form of a cell. You must call HFileScanner.seekTo(Cell) before this method.
@@ -489,7 +489,7 @@ extends HFileReaderImpl.HFileScannerImpl

getValue
-public ByteBuffer getValue()
+public ByteBuffer getValue()
Description copied from interface: HFileScanner
Gets a buffer view to the current value. You must call HFileScanner.seekTo(Cell) before this method.
@@ -510,7 +510,7 @@ extends HFileReaderImpl.HFileScannerImpl

getCell
-public Cell getCell()
+public Cell getCell()

Specified by:
getCell in interface HFileScanner
@@ -527,7 +527,7 @@ extends HFileReaderImpl.HFileScannerImpl

getKeyString
-public String getKeyString()
+public String getKeyString()
Description copied from interface: HFileScanner
Convenience method to get a copy of the key as a string - interpreting the bytes as UTF8. You must call HFileScanner.seekTo(Cell) before this method.
@@ -547,7 +547,7 @@ extends HFileReaderImpl.HFileScannerImpl

getValueString
-public String getValueString()
+public String getValueString()
Description copied from interface: HFileScanner
Convenience method to get a copy of the value as a string - interpreting the bytes as UTF8. You must call HFileScanner.seekTo(Cell) before this method.
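
The getKey/getValue/getCell hunks above all repeat one contract: position the scanner with HFileScanner.seekTo before reading, then advance with next(). A minimal sketch of that seek-then-read loop, assuming the HBase 2.0-era HFile.Reader/HFileScanner API; the file path and configuration here are illustrative only, not from this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;

public class HFileScanSketch {
  // Dumps every cell of an HFile, honoring the seek-before-read contract.
  public static void dump(Configuration conf, Path path) throws Exception {
    HFile.Reader reader = HFile.createReader(
        FileSystem.get(conf), path, new CacheConfig(conf), true, conf);
    try {
      HFileScanner scanner = reader.getScanner(false /* cacheBlocks */, true /* pread */);
      if (scanner.seekTo()) {              // must seek before getKey/getValue/getCell
        do {
          Cell cell = scanner.getCell();   // valid only after a successful seek
          System.out.println(cell);
        } while (scanner.next());          // loads the next block when needed
      }
    } finally {
      reader.close();
    }
  }
}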

[15/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Builder.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Builder.html b/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Builder.html
index c638897..e8675ce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Builder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Builder.html
@@ -146,7 +146,7 @@
 138   */
 139  public List<ServerName> getDeadServerNames() {
 140    if (deadServers == null) {
-141      return Collections.EMPTY_LIST;
+141      return Collections.emptyList();
 142    }
 143    return Collections.unmodifiableList(deadServers);
 144  }
@@ -264,7 +264,7 @@
 256
 257  public Collection<ServerName> getServers() {
 258    if (liveServers == null) {
-259      return Collections.EMPTY_LIST;
+259      return Collections.emptyList();
 260    }
 261    return Collections.unmodifiableCollection(this.liveServers.keySet());
 262  }
@@ -289,7 +289,7 @@
 281   */
 282  public List<ServerName> getBackupMasters() {
 283    if (backupMasters == null) {
-284      return Collections.EMPTY_LIST;
+284      return Collections.emptyList();
 285    }
 286    return Collections.unmodifiableList(this.backupMasters);
 287  }
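
The hunks above swap the raw Collections.EMPTY_LIST constant for the generic Collections.emptyList() factory. The point is type safety: EMPTY_LIST has the raw type List, so returning it from a method declared to return List<ServerName> compiles only with an unchecked-conversion warning, while emptyList() infers the element type. A minimal plain-Java illustration, not taken from the commit:

import java.util.Collections;
import java.util.List;

public class EmptyListExample {
  // Raw constant: needs an unchecked suppression because EMPTY_LIST is a raw List.
  @SuppressWarnings("unchecked")
  static List<String> raw() {
    return Collections.EMPTY_LIST;
  }

  // Generic factory: the element type is inferred, no warning.
  static List<String> typed() {
    return Collections.emptyList();
  }
}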

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Option.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Option.html b/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Option.html
index c638897..e8675ce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Option.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.Option.html
[Hunks identical to the ClusterStatus.Builder.html diff above.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html b/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
index c638897..e8675ce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ClusterStatus.html
[Hunks identical to the ClusterStatus.Builder.html diff above.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
index 38046ae..d98042d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
@@ -104,7 +104,7 @@
 096  /**
 097   * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
 098   * of KeyValue only.
-099   * @deprecated Use {@link CellComparatorImpl#COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.
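
The deprecation note above points callers at CellComparatorImpl#COMPARATOR. A hedged sketch of the replacement usage, assuming the HBase 2.0-era API; the row and column values are made up:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CellCompareSketch {
  public static void main(String[] args) {
    Cell a = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    Cell b = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    // Negative result: "row1" sorts before "row2".
    System.out.println(CellComparatorImpl.COMPARATOR.compare(a, b));
  }
}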

[13/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html b/devapidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
new file mode 100644
index 000..21f4ee7
--- /dev/null
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/BigDecimalComparator.html
@@ -0,0 +1,188 @@
[New HTML-rendered source page for BigDecimalComparator.java; its 188 added lines duplicate the Java source in the HBASE-19103 commit below and are omitted here.]

hbase git commit: HBASE-19103 Add BigDecimalComparator for filter

2017-11-07 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 57c0fb256 -> 1110910b3


HBASE-19103 Add BigDecimalComparator for filter

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1110910b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1110910b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1110910b

Branch: refs/heads/branch-2
Commit: 1110910b3a2b10b27e9c89f5fe291da9934492fb
Parents: 57c0fb2
Author: QilinCao 
Authored: Mon Oct 30 20:55:11 2017 +0800
Committer: tedyu 
Committed: Tue Nov 7 03:30:43 2017 -0800

--
 .../hbase/filter/BigDecimalComparator.java  | 116 ++
 .../src/main/protobuf/Comparator.proto  |   4 +
 .../src/main/protobuf/Comparator.proto  |   4 +
 .../hbase/filter/TestBigDecimalComparator.java  | 118 +++
 .../filter/TestComparatorSerialization.java |   9 ++
 .../hadoop/hbase/regionserver/TestHRegion.java  |  43 +++
 6 files changed, 294 insertions(+)
--
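
A hedged usage sketch of the new comparator inside a scan filter; the family and qualifier names are made up, and SingleColumnValueFilter is just one natural host for a ByteArrayComparable:

import java.math.BigDecimal;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BigDecimalComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class BigDecimalFilterSketch {
  // Scan rows whose cf:amount cell, decoded as a BigDecimal, exceeds the threshold.
  public static Scan greaterThan(BigDecimal threshold) {
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("cf"),                  // column family (illustrative)
        Bytes.toBytes("amount"),              // qualifier (illustrative)
        CompareFilter.CompareOp.GREATER,      // numeric, not lexicographic, comparison
        new BigDecimalComparator(threshold)); // the comparator added by this commit
    filter.setFilterIfMissing(true);          // skip rows that lack the column
    return new Scan().setFilter(filter);
  }
}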


http://git-wip-us.apache.org/repos/asf/hbase/blob/1110910b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
new file mode 100644
index 000..5da366f
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
@@ -0,0 +1,116 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.util.Objects;
+
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * A BigDecimal comparator which numerically compares against the specified byte array
+ */
+@InterfaceAudience.Public
+public class BigDecimalComparator extends ByteArrayComparable {
+  private BigDecimal bigDecimal;
+
+  public BigDecimalComparator(BigDecimal value) {
+super(Bytes.toBytes(value));
+this.bigDecimal = value;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+if (obj == null || !(obj instanceof BigDecimalComparator)) {
+  return false;
+}
+if (this == obj) {
+  return true;
+}
+BigDecimalComparator bdc = (BigDecimalComparator) obj;
+return this.bigDecimal.equals(bdc.bigDecimal);
+  }
+
+  @Override
+  public int hashCode() {
+return Objects.hash(this.bigDecimal);
+  }
+
+  @Override
+  public int compareTo(byte[] value, int offset, int length) {
+BigDecimal that = Bytes.toBigDecimal(value, offset, length);
+return this.bigDecimal.compareTo(that);
+  }
+
+  @Override
+  public int compareTo(ByteBuffer value, int offset, int length) {
+BigDecimal that = ByteBufferUtils.toBigDecimal(value, offset, length);
+return this.bigDecimal.compareTo(that);
+  }
+
+  /**
+   * @return The comparator serialized using pb
+   */
+  @Override
+  public byte[] toByteArray() {
+ComparatorProtos.BigDecimalComparator.Builder builder =
+ComparatorProtos.BigDecimalComparator.newBuilder();
+builder.setComparable(ProtobufUtil.toByteArrayComparable(this.value));
+return builder.build().toByteArray();
+  }
+
+  /**
+   * @param pbBytes A pb serialized {@link BigDecimalComparator} instance
+   * @return An instance of {@link BigDecimalComparator} made from bytes
+   * @throws DeserializationException A deserialization exception
+   * @see #toByteArray
+   */
+  public static BigDecimalComparator parseFrom(final byt