hbase git commit: HBASE-17634 Cleaned up the usage of Result.isPartial

2017-02-27 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 4d9042503 -> 84a9eb3f5


HBASE-17634 Cleaned up the usage of Result.isPartial

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/84a9eb3f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/84a9eb3f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/84a9eb3f

Branch: refs/heads/master
Commit: 84a9eb3f5c740d56c02a3ba54c9479090cd49f31
Parents: 4d90425
Author: Jan Hentschel 
Authored: Mon Feb 27 13:19:03 2017 +0100
Committer: zhangduo 
Committed: Mon Feb 27 20:44:35 2017 +0800

--
 .../client/ScannerCallableWithReplicas.java |  2 +-
 .../hbase/TestPartialResultsFromClientSide.java | 42 ++--
 2 files changed, 22 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/84a9eb3f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
index 8ec00ee..1000753 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
@@ -324,7 +324,7 @@ class ScannerCallableWithReplicas implements RetryingCallable<Result[]> {
 // 2. The last result was not a partial result which means it contained all of the cells for
 // that row (we no longer need any information from it). Set the start row to the next
 // closest row that could be seen.
-callable.getScan().withStartRow(this.lastResult.getRow(), this.lastResult.isPartial());
+callable.getScan().withStartRow(this.lastResult.getRow(), this.lastResult.hasMoreCellsInRow());
   }
 
   @VisibleForTesting

http://git-wip-us.apache.org/repos/asf/hbase/blob/84a9eb3f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
index 83f3101..61e3467 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
@@ -158,7 +158,7 @@ public class TestPartialResultsFromClientSide {
   message = "Ensuring the expected keyValues are present for row " + row;
   List<Cell> expectedKeyValues = createKeyValuesForRow(ROWS[row], FAMILIES, QUALIFIERS, VALUE);
   Result result = partialScanner.next();
-  assertFalse(result.isPartial());
+  assertFalse(result.hasMoreCellsInRow());
   verifyResult(result, expectedKeyValues, message);
 }
 
@@ -178,7 +178,7 @@ public class TestPartialResultsFromClientSide {
 Result result = scanner.next();
 
 assertTrue(result != null);
-assertTrue(result.isPartial());
+assertTrue(result.hasMoreCellsInRow());
 assertTrue(result.rawCells() != null);
 assertTrue(result.rawCells().length == 1);
 
@@ -189,7 +189,7 @@ public class TestPartialResultsFromClientSide {
 result = scanner.next();
 
 assertTrue(result != null);
-assertTrue(!result.isPartial());
+assertTrue(!result.hasMoreCellsInRow());
 assertTrue(result.rawCells() != null);
 assertTrue(result.rawCells().length == NUM_COLS);
 
@@ -283,7 +283,7 @@ public class TestPartialResultsFromClientSide {
 for (Cell c : partialResult.rawCells()) {
   aggregatePartialCells.add(c);
 }
-  } while (partialResult.isPartial());
+  } while (partialResult.hasMoreCellsInRow());
 
   assertTrue("Number of cells differs. iteration: " + iterationCount,
   oneShotResult.rawCells().length == aggregatePartialCells.size());
@@ -353,7 +353,7 @@ public class TestPartialResultsFromClientSide {
   // the last group of cells that fit inside the maxResultSize
   assertTrue(
   "Result's cell count differed from expected number. result: " + result,
-  result.rawCells().length == expectedNumberOfCells || !result.isPartial()
+  result.rawCells().length == expectedNumberOfCells || !result.hasMoreCellsInRow()
   || !Bytes.equals(prevRow, result.getRow()));
   prevRow = result.getRow();
 }
@@ -431,7 +431,7 @@ public class TestPartialResultsFromClientSide {
 while ((result = scanner.next()) != null) {
   assertTrue(result.rawCells() != null);
 
-  if (result.isPa
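
For context on the renamed accessor, here is a minimal client-side sketch of stitching partial Results back together, mirroring the aggregation pattern exercised by TestPartialResultsFromClientSide. The table name, column family handling, and result-size limit are illustrative assumptions, not part of this commit.

// A hedged sketch, not part of the patch: assumes an existing table "tbl" and a
// max result size small enough that wide rows come back as partial Results.
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class PartialResultExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("tbl"))) {
      Scan scan = new Scan();
      scan.setAllowPartialResults(true); // surface partial Results instead of hiding them
      scan.setMaxResultSize(1024);       // small limit so rows are split into partials
      try (ResultScanner scanner = table.getScanner(scan)) {
        List<Cell> rowCells = new ArrayList<>();
        Result partial;
        do {
          partial = scanner.next();
          if (partial == null) {
            break; // scan exhausted
          }
          for (Cell c : partial.rawCells()) {
            rowCells.add(c);
          }
          // hasMoreCellsInRow() is the renamed isPartial(): true means more cells
          // of the same row will arrive in subsequent Results.
        } while (partial.hasMoreCellsInRow());
        System.out.println("Aggregated " + rowCells.size() + " cells for the first row");
      }
    }
  }
}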

hbase git commit: HBASE-17634 Cleaned up the usage of Result.isPartial

2017-02-27 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1 8528e8287 -> ec84118c9


HBASE-17634 Cleaned up the usage of Result.isPartial

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ec84118c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ec84118c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ec84118c

Branch: refs/heads/branch-1
Commit: ec84118c973b846775260cd9dfc7189a0e451ad4
Parents: 8528e82
Author: Jan Hentschel 
Authored: Mon Feb 27 13:19:03 2017 +0100
Committer: zhangduo 
Committed: Mon Feb 27 20:45:58 2017 +0800

--
 .../client/ScannerCallableWithReplicas.java |  2 +-
 .../hbase/TestPartialResultsFromClientSide.java | 46 ++--
 2 files changed, 24 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ec84118c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
index e64baf5..168db88 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
@@ -325,7 +325,7 @@ class ScannerCallableWithReplicas implements RetryingCallable<Result[]> {
 // 2. The last result was not a partial result which means it contained all of the cells for
 // that row (we no longer need any information from it). Set the start row to the next
 // closest row that could be seen.
-callable.getScan().withStartRow(this.lastResult.getRow(), this.lastResult.isPartial());
+callable.getScan().withStartRow(this.lastResult.getRow(), this.lastResult.hasMoreCellsInRow());
   }
 
   @VisibleForTesting

http://git-wip-us.apache.org/repos/asf/hbase/blob/ec84118c/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
index 8679e46..3899242 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
@@ -152,7 +152,7 @@ public class TestPartialResultsFromClientSide {
   message = "Ensuring the expected keyValues are present for row " + row;
   List<Cell> expectedKeyValues = createKeyValuesForRow(ROWS[row], FAMILIES, QUALIFIERS, VALUE);
   Result result = partialScanner.next();
-  assertFalse(result.isPartial());
+  assertFalse(result.hasMoreCellsInRow());
   verifyResult(result, expectedKeyValues, message);
 }
 
@@ -172,7 +172,7 @@ public class TestPartialResultsFromClientSide {
 Result result = scanner.next();
 
 assertTrue(result != null);
-assertTrue(result.isPartial());
+assertTrue(result.hasMoreCellsInRow());
 assertTrue(result.rawCells() != null);
 assertTrue(result.rawCells().length == 1);
 
@@ -183,7 +183,7 @@ public class TestPartialResultsFromClientSide {
 result = scanner.next();
 
 assertTrue(result != null);
-assertTrue(!result.isPartial());
+assertTrue(!result.hasMoreCellsInRow());
 assertTrue(result.rawCells() != null);
 assertTrue(result.rawCells().length == NUM_COLS);
 
@@ -277,7 +277,7 @@ public class TestPartialResultsFromClientSide {
 for (Cell c : partialResult.rawCells()) {
   aggregatePartialCells.add(c);
 }
-  } while (partialResult.isPartial());
+  } while (partialResult.hasMoreCellsInRow());
 
   assertTrue("Number of cells differs. iteration: " + iterationCount,
   oneShotResult.rawCells().length == aggregatePartialCells.size());
@@ -346,8 +346,8 @@ public class TestPartialResultsFromClientSide {
   // 2. It is the first result we have seen for that row and thus may have been fetched as
   // the last group of cells that fit inside the maxResultSize
   assertTrue(
-  "Result's cell count differed from expected number. result: " + result.rawCells().length,
-  result.rawCells().length == expectedNumberOfCells || !result.isPartial()
+  "Result's cell count differed from expected number. result: " + result,
+  result.rawCells().length == expectedNumberOfCells || !result.hasMoreCellsInRow()
   || !Bytes.equals(prevRow, result.getRow()));
   prevRow = result.g

hbase git commit: HBASE-17654 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)

2017-02-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 84a9eb3f5 -> e8f9de785


HBASE-17654 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e8f9de78
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e8f9de78
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e8f9de78

Branch: refs/heads/master
Commit: e8f9de7851ca91bc840618e6ca16493b1060f6dc
Parents: 84a9eb3
Author: tedyu 
Authored: Mon Feb 27 08:32:54 2017 -0800
Committer: tedyu 
Committed: Mon Feb 27 08:32:54 2017 -0800

--
 .../thrift2/ThriftHBaseServiceHandler.java  |   18 +
 .../hbase/thrift2/generated/THBaseService.java  | 1148 ++
 .../apache/hadoop/hbase/thrift2/hbase.thrift|   15 +
 .../thrift2/TestThriftHBaseServiceHandler.java  |   28 +
 4 files changed, 1209 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e8f9de78/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index d0c44aa..5a68147 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -39,6 +39,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -252,6 +253,23 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
   }
 
   @Override
+  public List<Boolean> existsAll(ByteBuffer table, List<TGet> gets) throws TIOError, TException {
+    Table htable = getTable(table);
+    try {
+      boolean[] exists = htable.existsAll(getsFromThrift(gets));
+      List<Boolean> result = new ArrayList<>(exists.length);
+      for (boolean exist : exists) {
+        result.add(exist);
+      }
+      return result;
+    } catch (IOException e) {
+      throw getTIOError(e);
+    } finally {
+      closeTable(htable);
+    }
+  }
+
+  @Override
   public TResult get(ByteBuffer table, TGet get) throws TIOError, TException {
 Table htable = getTable(table);
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e8f9de78/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
index c010806..ae7c9d6 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
@@ -51,6 +51,18 @@ public class THBaseService {
 public boolean exists(ByteBuffer table, TGet tget) throws TIOError, org.apache.thrift.TException;
 
 /**
+ * Test for the existence of columns in the table, as specified by the TGets.
+ *
+ * This will return an array of booleans. Each value will be true if the related Get matches
+ * one or more keys, false if not.
+ *
+ * @param table the table to check on
+ *
+ * @param tgets a list of TGets to check for
+ */
+public List<Boolean> existsAll(ByteBuffer table, List<TGet> tgets) throws TIOError, org.apache.thrift.TException;
+
+/**
  * Method for getting data from a row.
  * 
  * If the row cannot be found an empty Result is returned.
@@ -276,6 +288,8 @@ public class THBaseService {
 
 public void exists(ByteBuffer table, TGet tget, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
+public void existsAll(ByteBuffer table, List<TGet> tgets, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
 public void get(ByteBuffer table, TGet tget, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
 public void getMultiple(ByteBuffer table, List<TGet> tgets, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
@@ -361,6 +375,33 @@ public class THBaseService {
   throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "exists failed: unknown result");
 }
 
+public List existsAll(ByteBuffer table
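
A hedged client-side sketch of the new call, assuming a thrift2 server on localhost:9090 speaking the default unframed TBinaryProtocol; the table name and row keys are illustrative, and the List<Boolean>/List<TGet> signature follows the handler added above.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class ExistsAllExample {
  public static void main(String[] args) throws Exception {
    TTransport transport = new TSocket("localhost", 9090); // assumed host/port of the thrift2 server
    transport.open();
    try {
      THBaseService.Client client = new THBaseService.Client(new TBinaryProtocol(transport));
      ByteBuffer table = ByteBuffer.wrap("example".getBytes(StandardCharsets.UTF_8));
      List<TGet> gets = Arrays.asList(
          new TGet(ByteBuffer.wrap("row1".getBytes(StandardCharsets.UTF_8))),
          new TGet(ByteBuffer.wrap("row2".getBytes(StandardCharsets.UTF_8))));
      // One boolean per TGet, in the same order as the request list.
      List<Boolean> exists = client.existsAll(table, gets);
      System.out.println(exists);
    } finally {
      transport.close();
    }
  }
}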

hbase git commit: HBASE-17654 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)

2017-02-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 ec84118c9 -> 47872cec2


HBASE-17654 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/47872cec
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/47872cec
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/47872cec

Branch: refs/heads/branch-1
Commit: 47872cec23ba2cf50b7232d4b4e01655a990ce51
Parents: ec84118
Author: tedyu 
Authored: Mon Feb 27 08:44:58 2017 -0800
Committer: tedyu 
Committed: Mon Feb 27 08:44:58 2017 -0800

--
 .../thrift2/ThriftHBaseServiceHandler.java  |   18 +
 .../hbase/thrift2/generated/THBaseService.java  | 1148 ++
 .../apache/hadoop/hbase/thrift2/hbase.thrift|   15 +
 .../thrift2/TestThriftHBaseServiceHandler.java  |   28 +
 4 files changed, 1209 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/47872cec/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index 0165e79..b508a1f 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -39,6 +39,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -251,6 +252,23 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
   }
 
   @Override
+  public List<Boolean> existsAll(ByteBuffer table, List<TGet> gets) throws TIOError, TException {
+    Table htable = getTable(table);
+    try {
+      boolean[] exists = htable.existsAll(getsFromThrift(gets));
+      List<Boolean> result = new ArrayList<>(exists.length);
+      for (boolean exist : exists) {
+        result.add(exist);
+      }
+      return result;
+    } catch (IOException e) {
+      throw getTIOError(e);
+    } finally {
+      closeTable(htable);
+    }
+  }
+
+  @Override
   public TResult get(ByteBuffer table, TGet get) throws TIOError, TException {
 Table htable = getTable(table);
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/47872cec/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
index 2dc7fc7..75ca93d 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
@@ -51,6 +51,18 @@ public class THBaseService {
 public boolean exists(ByteBuffer table, TGet tget) throws TIOError, org.apache.thrift.TException;
 
 /**
+ * Test for the existence of columns in the table, as specified by the TGets.
+ *
+ * This will return an array of booleans. Each value will be true if the related Get matches
+ * one or more keys, false if not.
+ *
+ * @param table the table to check on
+ *
+ * @param tgets a list of TGets to check for
+ */
+public List<Boolean> existsAll(ByteBuffer table, List<TGet> tgets) throws TIOError, org.apache.thrift.TException;
+
+/**
  * Method for getting data from a row.
  * 
  * If the row cannot be found an empty Result is returned.
@@ -276,6 +288,8 @@ public class THBaseService {
 
 public void exists(ByteBuffer table, TGet tget, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
+public void existsAll(ByteBuffer table, List<TGet> tgets, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
 public void get(ByteBuffer table, TGet tget, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
 public void getMultiple(ByteBuffer table, List<TGet> tgets, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
@@ -361,6 +375,33 @@ public class THBaseService {
   throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "exists failed: unknown result");
 }
 
+public List existsAll(ByteBuffer t

hbase git commit: Add section to shell tricks on how to set and query shell configuration

2017-02-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master e8f9de785 -> eec064dcd


Add section to shell tricks on how to set and query shell configuration


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eec064dc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eec064dc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eec064dc

Branch: refs/heads/master
Commit: eec064dcd198f0fffd94ad3274e785d3e6d5ee32
Parents: e8f9de7
Author: Michael Stack 
Authored: Mon Feb 27 19:58:37 2017 +
Committer: Michael Stack 
Committed: Mon Feb 27 19:58:37 2017 +

--
 src/main/asciidoc/_chapters/shell.adoc | 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eec064dc/src/main/asciidoc/_chapters/shell.adoc
--
diff --git a/src/main/asciidoc/_chapters/shell.adoc 
b/src/main/asciidoc/_chapters/shell.adoc
index 8f1f59b..1e51a20 100644
--- a/src/main/asciidoc/_chapters/shell.adoc
+++ b/src/main/asciidoc/_chapters/shell.adoc
@@ -352,6 +352,19 @@ hbase(main):022:0> Date.new(1218920189000).toString() => 
"Sat Aug 16 20:56:29 UT
 
 To output in a format that is exactly like that of the HBase log format will take a little messing with link:http://download.oracle.com/javase/6/docs/api/java/text/SimpleDateFormat.html[SimpleDateFormat].
 
+=== Query Shell Configuration
+
+hbase(main):001:0> @shell.hbase.configuration.get("hbase.rpc.timeout")
+=> "6"
+
+To set a config in the shell:
+
+hbase(main):005:0> @shell.hbase.configuration.setInt("hbase.rpc.timeout", 61010)
+hbase(main):006:0> @shell.hbase.configuration.get("hbase.rpc.timeout")
+=> "61010"
+
+
+
 [[tricks.pre-split]]
 === Pre-splitting tables with the HBase Shell
 You can use a variety of options to pre-split tables when creating them via the HBase Shell `create` command.



[2/3] hbase git commit: HBASE-17428 Implement informational RPCs for space quotas

2017-02-27 Thread elserj
http://git-wip-us.apache.org/repos/asf/hbase/blob/50bbc402/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
index d8cd701..f207472 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
@@ -7476,6 +7476,5554 @@ public final class QuotaProtos {
 
   }
 
+  public interface GetSpaceQuotaRegionSizesRequestOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.GetSpaceQuotaRegionSizesRequest)
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hbase.pb.GetSpaceQuotaRegionSizesRequest}
+   */
+  public  static final class GetSpaceQuotaRegionSizesRequest extends
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.GetSpaceQuotaRegionSizesRequest)
+  GetSpaceQuotaRegionSizesRequestOrBuilder {
+// Use GetSpaceQuotaRegionSizesRequest.newBuilder() to construct.
+private GetSpaceQuotaRegionSizesRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+  super(builder);
+}
+private GetSpaceQuotaRegionSizesRequest() {
+}
+
+@java.lang.Override
+public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private GetSpaceQuotaRegionSizesRequest(
+org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+  this();
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+  }
+}
+  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+e).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_descriptor;
+}
+
+protected 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_GetSpaceQuotaRegionSizesRequest_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.class,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest.Builder.class);
+}
+
+private byte memoizedIsInitialized = -1;
+public final boolean isInitialized() {
+  byte isInitialized = memoizedIsInitialized;
+  if (isInitialized == 1) return true;
+  if (isInitialized == 0) return false;
+
+  memoizedIsInitialized = 1;
+  return true;
+}
+
+public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+throws java.io.IOException {
+  unknownFields.writeTo(output);
+}
+
+public int getSerializedSize() {
+  int size = memoizedSize;
+  if (size != -1) return size;
+
+  size = 0;
+  size += unknownFields.getSerializedSize();
+  memoizedSize = size;
+  return size;
+}
+
+private static final long serialVersionUID = 0

[1/3] hbase git commit: HBASE-17428 Implement informational RPCs for space quotas

2017-02-27 Thread elserj
Repository: hbase
Updated Branches:
  refs/heads/HBASE-16961 637e3cb16 -> 50bbc4025


http://git-wip-us.apache.org/repos/asf/hbase/blob/50bbc402/hbase-protocol-shaded/src/main/protobuf/Admin.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/Admin.proto 
b/hbase-protocol-shaded/src/main/protobuf/Admin.proto
index 338c80b..1a085e6 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Admin.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Admin.proto
@@ -28,6 +28,7 @@ option optimize_for = SPEED;
 import "ClusterStatus.proto";
 import "HBase.proto";
 import "WAL.proto";
+import "Quota.proto";
 
 message GetRegionInfoRequest {
   required RegionSpecifier region = 1;
@@ -314,4 +315,12 @@ service AdminService {
 
   rpc GetRegionLoad(GetRegionLoadRequest)
 returns(GetRegionLoadResponse);
+
+  /** Fetches the RegionServer's view of space quotas */
+  rpc GetSpaceQuotaSnapshots(GetSpaceQuotaSnapshotsRequest)
+returns(GetSpaceQuotaSnapshotsResponse);
+
+  /** Fetches the RegionServer's space quota active enforcements */
+  rpc GetSpaceQuotaEnforcements(GetSpaceQuotaEnforcementsRequest)
+returns(GetSpaceQuotaEnforcementsResponse);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/50bbc402/hbase-protocol-shaded/src/main/protobuf/Master.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/Master.proto 
b/hbase-protocol-shaded/src/main/protobuf/Master.proto
index f4f5fc8..157801a 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Master.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Master.proto
@@ -891,4 +891,8 @@ service MasterService {
   /** Unmark a list of ServerNames marked as draining. */
   rpc removeDrainFromRegionServers(RemoveDrainFromRegionServersRequest)
 returns(RemoveDrainFromRegionServersResponse);
+
+  /** Fetches the Master's view of space quotas */
+  rpc GetSpaceQuotaRegionSizes(GetSpaceQuotaRegionSizesRequest)
+returns(GetSpaceQuotaRegionSizesResponse);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/50bbc402/hbase-protocol-shaded/src/main/protobuf/Quota.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/Quota.proto 
b/hbase-protocol-shaded/src/main/protobuf/Quota.proto
index 597b059..2d7e5f5 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Quota.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Quota.proto
@@ -111,3 +111,38 @@ message SpaceQuotaSnapshot {
   optional uint64 usage = 2;
   optional uint64 limit = 3;
 }
+
+message GetSpaceQuotaRegionSizesRequest {
+}
+
+message GetSpaceQuotaRegionSizesResponse {
+  message RegionSizes {
+optional TableName table_name = 1;
+optional uint64 size = 2;
+  }
+  repeated RegionSizes sizes = 1;
+}
+
+message GetSpaceQuotaSnapshotsRequest {
+}
+
+message GetSpaceQuotaSnapshotsResponse {
+  // Cannot use TableName as a map key, do the repeated nested message by hand.
+  message TableQuotaSnapshot {
+optional TableName table_name = 1;
+optional SpaceQuotaSnapshot snapshot = 2;
+  }
+  repeated TableQuotaSnapshot snapshots = 1;
+}
+
+message GetSpaceQuotaEnforcementsRequest {
+}
+
+message GetSpaceQuotaEnforcementsResponse {
+  // Cannot use TableName as a map key, do the repeated nested message by hand.
+  message TableViolationPolicy {
+optional TableName table_name = 1;
+optional SpaceViolationPolicy violation_policy = 2;
+  }
+  repeated TableViolationPolicy violation_policies = 1;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/50bbc402/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index df1c089..4b5f9d6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -21,8 +21,11 @@ package org.apache.hadoop.hbase.master;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.commons.logging.Log;
@@ -93,6 +96,9 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockS
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.*;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesReque
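
A hedged sketch of the Quota.proto additions in use, assuming the shaded classes generated from the messages above (QuotaProtos and HBaseProtos in the shaded generated package); the table name and size are illustrative. It builds a GetSpaceQuotaRegionSizesResponse roughly the way a Master-side handler might, with one RegionSizes entry per table.

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;

public class RegionSizesResponseExample {
  public static void main(String[] args) {
    // TableName comes from HBase.proto: required bytes namespace and qualifier.
    TableName table = TableName.newBuilder()
        .setNamespace(ByteString.copyFromUtf8("default"))
        .setQualifier(ByteString.copyFromUtf8("t1"))
        .build();
    // One repeated RegionSizes entry per table; size is a byte count.
    GetSpaceQuotaRegionSizesResponse response = GetSpaceQuotaRegionSizesResponse.newBuilder()
        .addSizes(GetSpaceQuotaRegionSizesResponse.RegionSizes.newBuilder()
            .setTableName(table)
            .setSize(128L * 1024 * 1024))
        .build();
    System.out.println(response);
  }
}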

[3/3] hbase git commit: HBASE-17428 Implement informational RPCs for space quotas

2017-02-27 Thread elserj
HBASE-17428 Implement informational RPCs for space quotas

Create some RPCs that can expose the in-memory state that the
RegionServers and Master hold to drive the space quota "state machine".
Then, create some hbase shell commands to interact with those.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/50bbc402
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/50bbc402
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/50bbc402

Branch: refs/heads/HBASE-16961
Commit: 50bbc40254fb9991530e0a24458c4b01b3660fc9
Parents: 637e3cb
Author: Josh Elser 
Authored: Tue Feb 21 15:36:39 2017 -0500
Committer: Josh Elser 
Committed: Mon Feb 27 17:08:10 2017 -0500

--
 .../hbase/client/ConnectionImplementation.java  |9 +
 .../hadoop/hbase/client/QuotaStatusCalls.java   |  125 +
 .../hadoop/hbase/quotas/QuotaTableUtil.java |   77 +
 .../hbase/shaded/protobuf/RequestConverter.java |   33 +
 .../shaded/protobuf/generated/AdminProtos.java  |  394 +-
 .../shaded/protobuf/generated/MasterProtos.java |   90 +-
 .../shaded/protobuf/generated/QuotaProtos.java  | 5686 +-
 .../src/main/protobuf/Admin.proto   |9 +
 .../src/main/protobuf/Master.proto  |4 +
 .../src/main/protobuf/Quota.proto   |   35 +
 .../hadoop/hbase/master/MasterRpcServices.java  |   40 +
 .../hbase/quotas/ActivePolicyEnforcement.java   |8 +
 .../hbase/regionserver/RSRpcServices.java   |   57 +
 .../hadoop/hbase/master/MockRegionServer.java   |   18 +
 .../hbase/quotas/TestQuotaStatusRPCs.java   |  192 +
 hbase-shell/src/main/ruby/hbase/quotas.rb   |   16 +
 hbase-shell/src/main/ruby/shell.rb  |3 +
 .../ruby/shell/commands/list_quota_snapshots.rb |   59 +
 .../shell/commands/list_quota_table_sizes.rb|   47 +
 .../shell/commands/list_quota_violations.rb |   48 +
 hbase-shell/src/test/ruby/hbase/quotas_test.rb  |   24 -
 .../test/ruby/hbase/quotas_test_no_cluster.rb   |   69 +
 22 files changed, 6891 insertions(+), 152 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/50bbc402/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index ca21365..b14496e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -94,6 +94,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCa
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest;
@@ -1718,6 +1720,13 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   ListReplicationPeersRequest request) throws ServiceException {
 return stub.listReplicationPeers(controller, request);
   }
+
+  @Override
+  public GetSpaceQuotaRegionSizesResponse getSpaceQuotaRegionSizes(
+  RpcController controller, GetSpaceQuotaRegionSizesRequest request)
+  throws ServiceException {
+return stub.getSpaceQuotaRegionSizes(controller, request);
+  }
 };
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/50bbc402/hbase-client/src/main/java/org/apache/hadoop/hbase/client/QuotaStatusCalls.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/QuotaStatusCalls.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/QuotaStatusCalls.java
new file mode 100644
index 000..f0f385d
--- /dev/null
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/QuotaStatusCalls.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreem

hbase git commit: HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice - Revert wrong JIRA Id

2017-02-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master eec064dcd -> bbc232e4e


HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice - Revert wrong JIRA Id


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bbc232e4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bbc232e4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bbc232e4

Branch: refs/heads/master
Commit: bbc232e4eed08209f2a51010c29a5c3df1cd792b
Parents: eec064d
Author: tedyu 
Authored: Mon Feb 27 14:38:18 2017 -0800
Committer: tedyu 
Committed: Mon Feb 27 14:38:18 2017 -0800

--
 .../thrift2/ThriftHBaseServiceHandler.java  |   18 -
 .../hbase/thrift2/generated/THBaseService.java  | 1148 --
 .../apache/hadoop/hbase/thrift2/hbase.thrift|   15 -
 .../thrift2/TestThriftHBaseServiceHandler.java  |   28 -
 4 files changed, 1209 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bbc232e4/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index 5a68147..d0c44aa 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -39,7 +39,6 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -253,23 +252,6 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
   }
 
   @Override
-  public List<Boolean> existsAll(ByteBuffer table, List<TGet> gets) throws TIOError, TException {
-    Table htable = getTable(table);
-    try {
-      boolean[] exists = htable.existsAll(getsFromThrift(gets));
-      List<Boolean> result = new ArrayList<>(exists.length);
-      for (boolean exist : exists) {
-        result.add(exist);
-      }
-      return result;
-    } catch (IOException e) {
-      throw getTIOError(e);
-    } finally {
-      closeTable(htable);
-    }
-  }
-
-  @Override
   public TResult get(ByteBuffer table, TGet get) throws TIOError, TException {
 Table htable = getTable(table);
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/bbc232e4/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
index ae7c9d6..c010806 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
@@ -51,18 +51,6 @@ public class THBaseService {
 public boolean exists(ByteBuffer table, TGet tget) throws TIOError, 
org.apache.thrift.TException;
 
 /**
- * Test for the existence of columns in the table, as specified by the 
TGets.
- *
- * This will return an array of booleans. Each value will be true if the 
related Get matches
- * one or more keys, false if not.
- *
- * @param table the table to check on
- *
- * @param tgets a list of TGets to check for
- */
-public List existsAll(ByteBuffer table, List tgets) throws 
TIOError, org.apache.thrift.TException;
-
-/**
  * Method for getting data from a row.
  * 
  * If the row cannot be found an empty Result is returned.
@@ -288,8 +276,6 @@ public class THBaseService {
 
 public void exists(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
-public void existsAll(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback 
resultHandler) throws org.apache.thrift.TException;
-
 public void get(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
 public void getMultiple(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
@@ -375,33 +361,6 @@ public class THBaseService {
   throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "exists failed: unknown result");
 }
 
-public List existsAll(ByteBuff

hbase git commit: HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice - Revert wrong JIRA Id

2017-02-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 47872cec2 -> 04dd73c77


HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice - Revert wrong JIRA Id


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04dd73c7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04dd73c7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04dd73c7

Branch: refs/heads/branch-1
Commit: 04dd73c77b141c7ac8b7d899086ca1604842662a
Parents: 47872ce
Author: tedyu 
Authored: Mon Feb 27 14:38:55 2017 -0800
Committer: tedyu 
Committed: Mon Feb 27 14:38:55 2017 -0800

--
 .../thrift2/ThriftHBaseServiceHandler.java  |   18 -
 .../hbase/thrift2/generated/THBaseService.java  | 1148 --
 .../apache/hadoop/hbase/thrift2/hbase.thrift|   15 -
 .../thrift2/TestThriftHBaseServiceHandler.java  |   28 -
 4 files changed, 1209 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/04dd73c7/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index b508a1f..0165e79 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -39,7 +39,6 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -252,23 +251,6 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
   }
 
   @Override
-  public List<Boolean> existsAll(ByteBuffer table, List<TGet> gets) throws TIOError, TException {
-    Table htable = getTable(table);
-    try {
-      boolean[] exists = htable.existsAll(getsFromThrift(gets));
-      List<Boolean> result = new ArrayList<>(exists.length);
-      for (boolean exist : exists) {
-        result.add(exist);
-      }
-      return result;
-    } catch (IOException e) {
-      throw getTIOError(e);
-    } finally {
-      closeTable(htable);
-    }
-  }
-
-  @Override
   public TResult get(ByteBuffer table, TGet get) throws TIOError, TException {
 Table htable = getTable(table);
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/04dd73c7/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
index 75ca93d..2dc7fc7 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
@@ -51,18 +51,6 @@ public class THBaseService {
 public boolean exists(ByteBuffer table, TGet tget) throws TIOError, 
org.apache.thrift.TException;
 
 /**
- * Test for the existence of columns in the table, as specified by the 
TGets.
- *
- * This will return an array of booleans. Each value will be true if the 
related Get matches
- * one or more keys, false if not.
- *
- * @param table the table to check on
- *
- * @param tgets a list of TGets to check for
- */
-public List existsAll(ByteBuffer table, List tgets) throws 
TIOError, org.apache.thrift.TException;
-
-/**
  * Method for getting data from a row.
  * 
  * If the row cannot be found an empty Result is returned.
@@ -288,8 +276,6 @@ public class THBaseService {
 
 public void exists(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
-public void existsAll(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback 
resultHandler) throws org.apache.thrift.TException;
-
 public void get(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
 public void getMultiple(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
@@ -375,33 +361,6 @@ public class THBaseService {
   throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "exists failed: unknown result");
 }
 
-public List existsAll(Byte

hbase git commit: HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)

2017-02-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master bbc232e4e -> eabfa39b2


HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eabfa39b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eabfa39b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eabfa39b

Branch: refs/heads/master
Commit: eabfa39b27b2198835a2bef2f98ee7fcc29b0538
Parents: bbc232e
Author: tedyu 
Authored: Mon Feb 27 14:40:10 2017 -0800
Committer: tedyu 
Committed: Mon Feb 27 14:40:10 2017 -0800

--
 .../thrift2/ThriftHBaseServiceHandler.java  |   18 +
 .../hbase/thrift2/generated/THBaseService.java  | 1148 ++
 .../apache/hadoop/hbase/thrift2/hbase.thrift|   15 +
 .../thrift2/TestThriftHBaseServiceHandler.java  |   28 +
 4 files changed, 1209 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eabfa39b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index d0c44aa..5a68147 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -39,6 +39,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -252,6 +253,23 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
   }
 
   @Override
+  public List<Boolean> existsAll(ByteBuffer table, List<TGet> gets) throws TIOError, TException {
+    Table htable = getTable(table);
+    try {
+      boolean[] exists = htable.existsAll(getsFromThrift(gets));
+      List<Boolean> result = new ArrayList<>(exists.length);
+      for (boolean exist : exists) {
+        result.add(exist);
+      }
+      return result;
+    } catch (IOException e) {
+      throw getTIOError(e);
+    } finally {
+      closeTable(htable);
+    }
+  }
+
+  @Override
   public TResult get(ByteBuffer table, TGet get) throws TIOError, TException {
 Table htable = getTable(table);
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/eabfa39b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
index c010806..ae7c9d6 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
@@ -51,6 +51,18 @@ public class THBaseService {
 public boolean exists(ByteBuffer table, TGet tget) throws TIOError, 
org.apache.thrift.TException;
 
 /**
+ * Test for the existence of columns in the table, as specified by the 
TGets.
+ *
+ * This will return an array of booleans. Each value will be true if the 
related Get matches
+ * one or more keys, false if not.
+ *
+ * @param table the table to check on
+ *
+ * @param tgets a list of TGets to check for
+ */
+public List existsAll(ByteBuffer table, List tgets) throws 
TIOError, org.apache.thrift.TException;
+
+/**
  * Method for getting data from a row.
  * 
  * If the row cannot be found an empty Result is returned.
@@ -276,6 +288,8 @@ public class THBaseService {
 
 public void exists(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
+public void existsAll(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback 
resultHandler) throws org.apache.thrift.TException;
+
 public void get(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
 public void getMultiple(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
@@ -361,6 +375,33 @@ public class THBaseService {
   throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "exists failed: unknown result");
 }
 
+public List existsAll(ByteBuffer table

hbase git commit: HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)

2017-02-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 04dd73c77 -> 324915508


HBASE-17689 Add support for table.existsAll in thrift2 THBaseservice (Yechao Chen)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/32491550
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/32491550
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/32491550

Branch: refs/heads/branch-1
Commit: 3249155080aa4a205fff2a141d9e5f9ab018d9f1
Parents: 04dd73c
Author: tedyu 
Authored: Mon Feb 27 14:41:07 2017 -0800
Committer: tedyu 
Committed: Mon Feb 27 14:41:07 2017 -0800

--
 .../thrift2/ThriftHBaseServiceHandler.java  |   18 +
 .../hbase/thrift2/generated/THBaseService.java  | 1148 ++
 .../apache/hadoop/hbase/thrift2/hbase.thrift|   15 +
 .../thrift2/TestThriftHBaseServiceHandler.java  |   28 +
 4 files changed, 1209 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/32491550/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index 0165e79..b508a1f 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -39,6 +39,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -251,6 +252,23 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
   }
 
   @Override
+  public List<Boolean> existsAll(ByteBuffer table, List<TGet> gets) throws TIOError, TException {
+    Table htable = getTable(table);
+    try {
+      boolean[] exists = htable.existsAll(getsFromThrift(gets));
+      List<Boolean> result = new ArrayList<>(exists.length);
+      for (boolean exist : exists) {
+        result.add(exist);
+      }
+      return result;
+    } catch (IOException e) {
+      throw getTIOError(e);
+    } finally {
+      closeTable(htable);
+    }
+  }
+
+  @Override
   public TResult get(ByteBuffer table, TGet get) throws TIOError, TException {
 Table htable = getTable(table);
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/32491550/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
index 2dc7fc7..75ca93d 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
@@ -51,6 +51,18 @@ public class THBaseService {
 public boolean exists(ByteBuffer table, TGet tget) throws TIOError, 
org.apache.thrift.TException;
 
 /**
+ * Test for the existence of columns in the table, as specified by the 
TGets.
+ *
+ * This will return an array of booleans. Each value will be true if the 
related Get matches
+ * one or more keys, false if not.
+ *
+ * @param table the table to check on
+ *
+ * @param tgets a list of TGets to check for
+ */
+public List existsAll(ByteBuffer table, List tgets) throws 
TIOError, org.apache.thrift.TException;
+
+/**
  * Method for getting data from a row.
  * 
  * If the row cannot be found an empty Result is returned.
@@ -276,6 +288,8 @@ public class THBaseService {
 
 public void exists(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
+public void existsAll(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback 
resultHandler) throws org.apache.thrift.TException;
+
 public void get(ByteBuffer table, TGet tget, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
 public void getMultiple(ByteBuffer table, List tgets, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
@@ -361,6 +375,33 @@ public class THBaseService {
   throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "exists failed: unknown result");
 }
 
+public List existsAll(ByteBuffer t

[1/2] hbase git commit: HBASE-16188 Add EventCounter information to log4j properties file (Gopi Krishnan Nambiar)

2017-02-27 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 324915508 -> 07e63c837
  refs/heads/master eabfa39b2 -> be1cdc737


HBASE-16188 Add EventCounter information to log4j properties file (Gopi Krishnan Nambiar)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/07e63c83
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/07e63c83
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/07e63c83

Branch: refs/heads/branch-1
Commit: 07e63c837b54c6c5908932c11203d1531f44f9a9
Parents: 3249155
Author: Andrew Purtell 
Authored: Mon Feb 27 15:55:32 2017 -0800
Committer: Andrew Purtell 
Committed: Mon Feb 27 15:55:32 2017 -0800

--
 conf/log4j.properties   | 5 +
 .../hbase-client-project/src/main/resources/log4j.properties| 5 +
 .../src/main/resources/log4j.properties | 5 +
 3 files changed, 15 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/07e63c83/conf/log4j.properties
--
diff --git a/conf/log4j.properties b/conf/log4j.properties
index e527c9b..61f49d8 100644
--- a/conf/log4j.properties
+++ b/conf/log4j.properties
@@ -111,3 +111,8 @@ 
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
 # and scan of hbase:meta messages
 # 
log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
 # log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
+
+# EventCounter
+# Add "EventCounter" to rootlogger if you want to use this
+# Uncomment the line below to add EventCounter information
+# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter

http://git-wip-us.apache.org/repos/asf/hbase/blob/07e63c83/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
--
diff --git 
a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties 
b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
index d7c4552..2475ea0 100644
--- a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
+++ b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
@@ -109,3 +109,8 @@ 
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
 # and scan of hbase:meta messages
 # 
log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
 # log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
+
+# EventCounter
+# Add "EventCounter" to rootlogger if you want to use this
+# Uncomment the line below to add EventCounter information
+# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter

http://git-wip-us.apache.org/repos/asf/hbase/blob/07e63c83/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
--
diff --git 
a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
 
b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
index d7c4552..2475ea0 100644
--- 
a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
+++ 
b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
@@ -109,3 +109,8 @@ 
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
 # and scan of hbase:meta messages
 # 
log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
 # log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
+
+# EventCounter
+# Add "EventCounter" to rootlogger if you want to use this
+# Uncomment the line below to add EventCounter information
+# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
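
Note on usage: the hunks above only add a commented-out appender definition; EventCounter does nothing until the appender is both defined and attached to the root logger. A minimal enabled variant is sketched below, assuming the stock hbase.root.logger/console wiring of a default HBase log4j.properties (those two names come from the standard file, not from this commit):

  # Sketch only: EventCounter enabled (not shipped by this commit)
  hbase.root.logger=INFO,console
  # attach the counter alongside the normal console appender
  log4j.rootLogger=${hbase.root.logger},EventCounter
  # counts FATAL/ERROR/WARN/INFO log events so they can be surfaced as JVM metrics
  log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter

The same commented block is added to all three properties files touched by this commit, so enabling it is the identical edit in each place.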



[2/2] hbase git commit: HBASE-16188 Add EventCounter information to log4j properties file (Gopi Krishnan Nambiar)

2017-02-27 Thread apurtell
HBASE-16188 Add EventCounter information to log4j properties file (Gopi Krishnan Nambiar)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/be1cdc73
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/be1cdc73
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/be1cdc73

Branch: refs/heads/master
Commit: be1cdc7376ae6b076016bd7291930b35e5ba352f
Parents: eabfa39
Author: Andrew Purtell 
Authored: Mon Feb 27 15:55:32 2017 -0800
Committer: Andrew Purtell 
Committed: Mon Feb 27 15:56:55 2017 -0800

--
 conf/log4j.properties   | 5 +
 .../hbase-client-project/src/main/resources/log4j.properties| 5 +
 .../src/main/resources/log4j.properties | 5 +
 3 files changed, 15 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/be1cdc73/conf/log4j.properties
--
diff --git a/conf/log4j.properties b/conf/log4j.properties
index 74b13b1..293386a 100644
--- a/conf/log4j.properties
+++ b/conf/log4j.properties
@@ -111,3 +111,8 @@ 
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
 # Uncomment the below if you want to remove logging of client region caching'
 # and scan of hbase:meta messages
 # log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO
+
+# EventCounter
+# Add "EventCounter" to rootlogger if you want to use this
+# Uncomment the line below to add EventCounter information
+# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter

http://git-wip-us.apache.org/repos/asf/hbase/blob/be1cdc73/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
--
diff --git 
a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties 
b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
index d7c4552..2475ea0 100644
--- a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
+++ b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
@@ -109,3 +109,8 @@ 
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
 # and scan of hbase:meta messages
 # 
log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
 # log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
+
+# EventCounter
+# Add "EventCounter" to rootlogger if you want to use this
+# Uncomment the line below to add EventCounter information
+# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter

http://git-wip-us.apache.org/repos/asf/hbase/blob/be1cdc73/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
--
diff --git 
a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
 
b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
index d7c4552..2475ea0 100644
--- 
a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
+++ 
b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
@@ -109,3 +109,8 @@ 
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
 # and scan of hbase:meta messages
 # 
log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
 # log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
+
+# EventCounter
+# Add "EventCounter" to rootlogger if you want to use this
+# Uncomment the line below to add EventCounter information
+# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter



hbase git commit: HBASE-17690 Clean up MOB code

2017-02-27 Thread jingchengdu
Repository: hbase
Updated Branches:
  refs/heads/master be1cdc737 -> 5a8f1e8aa


HBASE-17690 Clean up MOB code


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5a8f1e8a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5a8f1e8a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5a8f1e8a

Branch: refs/heads/master
Commit: 5a8f1e8aaa026fd33be0bd8a69f0ee78f54392ce
Parents: be1cdc7
Author: Jingcheng Du 
Authored: Tue Feb 28 09:57:34 2017 +0800
Committer: Jingcheng Du 
Committed: Tue Feb 28 09:57:34 2017 +0800

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   2 +-
 .../hbase/mob/DefaultMobStoreCompactor.java | 130 +--
 .../hbase/mob/DefaultMobStoreFlusher.java   |  68 +++---
 .../org/apache/hadoop/hbase/mob/MobUtils.java   | 111 +++-
 .../PartitionedMobCompactionRequest.java|   1 -
 .../compactions/PartitionedMobCompactor.java|   5 +-
 .../hadoop/hbase/regionserver/HMobStore.java|  48 +++
 .../hadoop/hbase/regionserver/HStore.java   |   2 +-
 .../regionserver/compactions/Compactor.java |   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java   |   1 -
 .../hadoop/hbase/mob/TestMobFileCache.java  |   2 +-
 .../hbase/mob/compactions/TestMobCompactor.java |  46 ---
 .../hbase/regionserver/TestHMobStore.java   |   2 +-
 .../hbase/regionserver/TestMobStoreScanner.java |   2 +-
 14 files changed, 249 insertions(+), 173 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5a8f1e8a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 5150194..c68d3bb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -1206,8 +1206,8 @@ public class HBaseAdmin implements Admin {
* @param tableName table or region to compact
* @param columnFamily column family within a table or region
* @param major True if we are to do a major compaction.
+   * @param compactType {@link org.apache.hadoop.hbase.client.CompactType}
* @throws IOException if a remote or network exception occurs
-   * @throws InterruptedException
*/
   private void compact(final TableName tableName, final byte[] 
columnFamily,final boolean major,
CompactType compactType) throws IOException {
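
For orientation, the private helper above is what the public compaction calls funnel into. A hedged client-side sketch follows; it assumes the public Admin#compact(TableName, byte[], CompactType) overload and CompactType.MOB exist as described by the javadoc link above, and the table and column-family names are placeholders, not part of this commit:

  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.CompactType;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.util.Bytes;

  public class MobCompactionExample {
    public static void main(String[] args) throws Exception {
      try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
           Admin admin = conn.getAdmin()) {
        // Request a (minor) MOB compaction of one column family; the request is routed
        // through the private compact(tableName, columnFamily, major, compactType) shown above.
        admin.compact(TableName.valueOf("usertable"), Bytes.toBytes("f"), CompactType.MOB);
      }
    }
  }

The plain overloads that take no CompactType keep the regular store compaction path.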

http://git-wip-us.apache.org/repos/asf/hbase/blob/5a8f1e8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
index 936a6fd..697286c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.mob;
 
 import java.io.IOException;
+import java.io.InterruptedIOException;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
@@ -36,17 +37,21 @@ import org.apache.hadoop.hbase.regionserver.CellSink;
 import org.apache.hadoop.hbase.regionserver.HMobStore;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.hadoop.hbase.regionserver.ShipperListener;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
+import org.apache.hadoop.hbase.regionserver.throttle.ThroughputControlUtil;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
 /**
  * Compact passed set of files in the mob-enabled column family.
@@ -164,12 +169,20 @@ public class DefaultMobStoreCompactor extends 
DefaultCompactor {
   protected boolean performCompac

hbase git commit: HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey

2017-02-27 Thread jingchengdu
Repository: hbase
Updated Branches:
  refs/heads/master 5a8f1e8aa -> f4e0ea24e


HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f4e0ea24
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f4e0ea24
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f4e0ea24

Branch: refs/heads/master
Commit: f4e0ea24e674a8dd68b3a780318abe5e97c135b3
Parents: 5a8f1e8
Author: Jingcheng Du 
Authored: Tue Feb 28 11:38:35 2017 +0800
Committer: Jingcheng Du 
Committed: Tue Feb 28 11:38:35 2017 +0800

--
 .../hbase/filter/MultiRowRangeFilter.java   | 15 +-
 .../hbase/filter/TestMultiRowRangeFilter.java   | 31 
 2 files changed, 38 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f4e0ea24/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 768088b..2cc754a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -116,7 +116,9 @@ public class MultiRowRangeFilter extends FilterBase {
   } else {
 if (range.contains(rowArr, offset, length)) {
   currentReturnCode = ReturnCode.INCLUDE;
-} else currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+} else {
+  currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+}
   }
 } else {
   currentReturnCode = ReturnCode.INCLUDE;
@@ -151,7 +153,6 @@ public class MultiRowRangeFilter extends FilterBase {
 if (range.stopRow != null)
   
rangebuilder.setStopRow(UnsafeByteOperations.unsafeWrap(range.stopRow));
 rangebuilder.setStopRowInclusive(range.stopRowInclusive);
-range.isScan = Bytes.equals(range.startRow, range.stopRow) ? 1 : 0;
 builder.addRowRangeList(rangebuilder.build());
   }
 }
@@ -418,7 +419,6 @@ public class MultiRowRangeFilter extends FilterBase {
 private boolean startRowInclusive = true;
 private byte[] stopRow;
 private boolean stopRowInclusive = false;
-private int isScan = 0;
 
 public RowRange() {
 }
@@ -441,7 +441,6 @@ public class MultiRowRangeFilter extends FilterBase {
   this.startRowInclusive = startRowInclusive;
   this.stopRow = (stopRow == null) ? HConstants.EMPTY_BYTE_ARRAY :stopRow;
   this.stopRowInclusive = stopRowInclusive;
-  isScan = Bytes.equals(startRow, stopRow) ? 1 : 0;
 }
 
 public byte[] getStartRow() {
@@ -475,21 +474,21 @@ public class MultiRowRangeFilter extends FilterBase {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   } else {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   }
 }
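
Before the test changes below, a short illustration of what the corrected contains() enables: a range whose start and stop keys are identical now acts as a plain single-row range instead of relying on the removed isScan special case. The sketch assumes an existing Table handle and uses illustrative row keys; only MultiRowRangeFilter and its RowRange class come from the code being patched:

  import java.io.IOException;
  import java.util.Arrays;
  import org.apache.hadoop.hbase.client.ResultScanner;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
  import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
  import org.apache.hadoop.hbase.util.Bytes;

  public class SameStartStopRangeExample {
    // Scans exactly one row ("row5") plus a second half-open range [row7, row9).
    static ResultScanner scanWithSameStartAndStop(Table table) throws IOException {
      byte[] row5 = Bytes.toBytes("row5");
      MultiRowRangeFilter filter = new MultiRowRangeFilter(Arrays.asList(
          // start == stop with both bounds inclusive: matches only "row5" after this fix
          new RowRange(row5, true, row5, true),
          new RowRange(Bytes.toBytes("row7"), true, Bytes.toBytes("row9"), false)));
      Scan scan = new Scan();
      scan.setFilter(filter);
      return table.getScanner(scan);
    }
  }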

http://git-wip-us.apache.org/repos/asf/hbase/blob/f4e0ea24/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoo

hbase git commit: HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey

2017-02-27 Thread jingchengdu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 07e63c837 -> 2131c7955


HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2131c795
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2131c795
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2131c795

Branch: refs/heads/branch-1
Commit: 2131c7955257a74fb29d0ec28739521f2e84e90d
Parents: 07e63c8
Author: Jingcheng Du 
Authored: Tue Feb 28 12:15:38 2017 +0800
Committer: Jingcheng Du 
Committed: Tue Feb 28 12:15:38 2017 +0800

--
 .../hbase/filter/MultiRowRangeFilter.java   | 15 +-
 .../hbase/filter/TestMultiRowRangeFilter.java   | 31 
 2 files changed, 38 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2131c795/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 9df5249..7e9503c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -117,7 +117,9 @@ public class MultiRowRangeFilter extends FilterBase {
   } else {
 if (range.contains(buffer, offset, length)) {
   currentReturnCode = ReturnCode.INCLUDE;
-} else currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+} else {
+  currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+}
   }
 } else {
   currentReturnCode = ReturnCode.INCLUDE;
@@ -151,7 +153,6 @@ public class MultiRowRangeFilter extends FilterBase {
 if (range.stopRow != null)
   rangebuilder.setStopRow(ByteStringer.wrap(range.stopRow));
 rangebuilder.setStopRowInclusive(range.stopRowInclusive);
-range.isScan = Bytes.equals(range.startRow, range.stopRow) ? 1 : 0;
 builder.addRowRangeList(rangebuilder.build());
   }
 }
@@ -422,7 +423,6 @@ public class MultiRowRangeFilter extends FilterBase {
 private boolean startRowInclusive = true;
 private byte[] stopRow;
 private boolean stopRowInclusive = false;
-private int isScan = 0;
 
 public RowRange() {
 }
@@ -445,7 +445,6 @@ public class MultiRowRangeFilter extends FilterBase {
   this.startRowInclusive = startRowInclusive;
   this.stopRow = (stopRow == null) ? HConstants.EMPTY_BYTE_ARRAY :stopRow;
   this.stopRowInclusive = stopRowInclusive;
-  isScan = Bytes.equals(startRow, stopRow) ? 1 : 0;
 }
 
 public byte[] getStartRow() {
@@ -479,21 +478,21 @@ public class MultiRowRangeFilter extends FilterBase {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   } else {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2131c795/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fil

hbase git commit: HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey

2017-02-27 Thread jingchengdu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 1a81a27ac -> 5dd170b20


HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5dd170b2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5dd170b2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5dd170b2

Branch: refs/heads/branch-1.1
Commit: 5dd170b204cbbd3b7be935f2025c632fface6f90
Parents: 1a81a27
Author: Jingcheng Du 
Authored: Tue Feb 28 12:30:02 2017 +0800
Committer: Jingcheng Du 
Committed: Tue Feb 28 12:30:02 2017 +0800

--
 .../hbase/filter/MultiRowRangeFilter.java   | 15 +-
 .../hbase/filter/TestMultiRowRangeFilter.java   | 31 
 2 files changed, 38 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5dd170b2/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 4bcf430..01bdb04 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -117,7 +117,9 @@ public class MultiRowRangeFilter extends FilterBase {
   } else {
 if (range.contains(buffer, offset, length)) {
   currentReturnCode = ReturnCode.INCLUDE;
-} else currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+} else {
+  currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+}
   }
 } else {
   currentReturnCode = ReturnCode.INCLUDE;
@@ -151,7 +153,6 @@ public class MultiRowRangeFilter extends FilterBase {
 if (range.stopRow != null)
   rangebuilder.setStopRow(ByteStringer.wrap(range.stopRow));
 rangebuilder.setStopRowInclusive(range.stopRowInclusive);
-range.isScan = Bytes.equals(range.startRow, range.stopRow) ? 1 : 0;
 builder.addRowRangeList(rangebuilder.build());
   }
 }
@@ -422,7 +423,6 @@ public class MultiRowRangeFilter extends FilterBase {
 private boolean startRowInclusive = true;
 private byte[] stopRow;
 private boolean stopRowInclusive = false;
-private int isScan = 0;
 
 public RowRange() {
 }
@@ -445,7 +445,6 @@ public class MultiRowRangeFilter extends FilterBase {
   this.startRowInclusive = startRowInclusive;
   this.stopRow = (stopRow == null) ? HConstants.EMPTY_BYTE_ARRAY :stopRow;
   this.stopRowInclusive = stopRowInclusive;
-  isScan = Bytes.equals(startRow, stopRow) ? 1 : 0;
 }
 
 public byte[] getStartRow() {
@@ -479,21 +478,21 @@ public class MultiRowRangeFilter extends FilterBase {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   } else {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/5dd170b2/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase

hbase git commit: HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey

2017-02-27 Thread jingchengdu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 4caed356f -> 56dea8f87


HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/56dea8f8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/56dea8f8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/56dea8f8

Branch: refs/heads/branch-1.2
Commit: 56dea8f87437c88dcf5ddade46a63c80a73822c0
Parents: 4caed35
Author: Jingcheng Du 
Authored: Tue Feb 28 12:40:20 2017 +0800
Committer: Jingcheng Du 
Committed: Tue Feb 28 12:40:20 2017 +0800

--
 .../hbase/filter/MultiRowRangeFilter.java   | 15 +-
 .../hbase/filter/TestMultiRowRangeFilter.java   | 31 
 2 files changed, 38 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/56dea8f8/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 9df5249..7e9503c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -117,7 +117,9 @@ public class MultiRowRangeFilter extends FilterBase {
   } else {
 if (range.contains(buffer, offset, length)) {
   currentReturnCode = ReturnCode.INCLUDE;
-} else currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+} else {
+  currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+}
   }
 } else {
   currentReturnCode = ReturnCode.INCLUDE;
@@ -151,7 +153,6 @@ public class MultiRowRangeFilter extends FilterBase {
 if (range.stopRow != null)
   rangebuilder.setStopRow(ByteStringer.wrap(range.stopRow));
 rangebuilder.setStopRowInclusive(range.stopRowInclusive);
-range.isScan = Bytes.equals(range.startRow, range.stopRow) ? 1 : 0;
 builder.addRowRangeList(rangebuilder.build());
   }
 }
@@ -422,7 +423,6 @@ public class MultiRowRangeFilter extends FilterBase {
 private boolean startRowInclusive = true;
 private byte[] stopRow;
 private boolean stopRowInclusive = false;
-private int isScan = 0;
 
 public RowRange() {
 }
@@ -445,7 +445,6 @@ public class MultiRowRangeFilter extends FilterBase {
   this.startRowInclusive = startRowInclusive;
   this.stopRow = (stopRow == null) ? HConstants.EMPTY_BYTE_ARRAY :stopRow;
   this.stopRowInclusive = stopRowInclusive;
-  isScan = Bytes.equals(startRow, stopRow) ? 1 : 0;
 }
 
 public byte[] getStartRow() {
@@ -479,21 +478,21 @@ public class MultiRowRangeFilter extends FilterBase {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   } else {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/56dea8f8/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase

hbase git commit: HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey

2017-02-27 Thread jingchengdu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 37a49a0fa -> a5a8e9fa4


HBASE-17688 MultiRowRangeFilter not working correctly if given same start and stop RowKey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a5a8e9fa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a5a8e9fa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a5a8e9fa

Branch: refs/heads/branch-1.3
Commit: a5a8e9fa439a59d54ff04dd2089f94bb8cbad840
Parents: 37a49a0
Author: Jingcheng Du 
Authored: Tue Feb 28 12:53:22 2017 +0800
Committer: Jingcheng Du 
Committed: Tue Feb 28 12:53:22 2017 +0800

--
 .../hbase/filter/MultiRowRangeFilter.java   | 15 +-
 .../hbase/filter/TestMultiRowRangeFilter.java   | 31 
 2 files changed, 38 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a5a8e9fa/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 9df5249..7e9503c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -117,7 +117,9 @@ public class MultiRowRangeFilter extends FilterBase {
   } else {
 if (range.contains(buffer, offset, length)) {
   currentReturnCode = ReturnCode.INCLUDE;
-} else currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+} else {
+  currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
+}
   }
 } else {
   currentReturnCode = ReturnCode.INCLUDE;
@@ -151,7 +153,6 @@ public class MultiRowRangeFilter extends FilterBase {
 if (range.stopRow != null)
   rangebuilder.setStopRow(ByteStringer.wrap(range.stopRow));
 rangebuilder.setStopRowInclusive(range.stopRowInclusive);
-range.isScan = Bytes.equals(range.startRow, range.stopRow) ? 1 : 0;
 builder.addRowRangeList(rangebuilder.build());
   }
 }
@@ -422,7 +423,6 @@ public class MultiRowRangeFilter extends FilterBase {
 private boolean startRowInclusive = true;
 private byte[] stopRow;
 private boolean stopRowInclusive = false;
-private int isScan = 0;
 
 public RowRange() {
 }
@@ -445,7 +445,6 @@ public class MultiRowRangeFilter extends FilterBase {
   this.startRowInclusive = startRowInclusive;
   this.stopRow = (stopRow == null) ? HConstants.EMPTY_BYTE_ARRAY :stopRow;
   this.stopRowInclusive = stopRowInclusive;
-  isScan = Bytes.equals(startRow, stopRow) ? 1 : 0;
 }
 
 public byte[] getStartRow() {
@@ -479,21 +478,21 @@ public class MultiRowRangeFilter extends FilterBase {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) >= 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   } else {
 if(stopRowInclusive) {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) <= 0);
 } else {
   return Bytes.compareTo(buffer, offset, length, startRow, 0, 
startRow.length) > 0
   && (Bytes.equals(stopRow, HConstants.EMPTY_BYTE_ARRAY) ||
-  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < isScan);
+  Bytes.compareTo(buffer, offset, length, stopRow, 0, 
stopRow.length) < 0);
 }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5a8e9fa/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase

[hbase] Git Push Summary

2017-02-27 Thread ndimiduk
Repository: hbase
Updated Tags:  refs/tags/rel/1.1.9 [created] 1f79151c5


hbase git commit: bump version for next dev cycle

2017-02-27 Thread ndimiduk
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 5dd170b20 -> e4ba586fc


bump version for next dev cycle


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e4ba586f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e4ba586f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e4ba586f

Branch: refs/heads/branch-1.1
Commit: e4ba586fc2f398a3f163fef90059e5d3ab27a012
Parents: 5dd170b
Author: Nick Dimiduk 
Authored: Mon Feb 27 22:21:53 2017 -0800
Committer: Nick Dimiduk 
Committed: Mon Feb 27 22:21:58 2017 -0800

--
 hbase-annotations/pom.xml| 2 +-
 hbase-assembly/pom.xml   | 2 +-
 hbase-checkstyle/pom.xml | 4 ++--
 hbase-client/pom.xml | 2 +-
 hbase-common/pom.xml | 2 +-
 hbase-examples/pom.xml   | 2 +-
 hbase-hadoop-compat/pom.xml  | 2 +-
 hbase-hadoop2-compat/pom.xml | 2 +-
 hbase-it/pom.xml | 2 +-
 hbase-prefix-tree/pom.xml| 2 +-
 hbase-procedure/pom.xml  | 2 +-
 hbase-protocol/pom.xml   | 2 +-
 hbase-resource-bundle/pom.xml| 2 +-
 hbase-rest/pom.xml   | 2 +-
 hbase-server/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-client/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-server/pom.xml | 2 +-
 hbase-shaded/pom.xml | 2 +-
 hbase-shell/pom.xml  | 2 +-
 hbase-testing-util/pom.xml   | 2 +-
 hbase-thrift/pom.xml | 2 +-
 pom.xml  | 2 +-
 22 files changed, 23 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba586f/hbase-annotations/pom.xml
--
diff --git a/hbase-annotations/pom.xml b/hbase-annotations/pom.xml
index f4e2210..ea324bb 100644
--- a/hbase-annotations/pom.xml
+++ b/hbase-annotations/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
-    <version>1.1.9</version>
+    <version>1.1.10-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba586f/hbase-assembly/pom.xml
--
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index e4bc606..5937e80 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
-    <version>1.1.9</version>
+    <version>1.1.10-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
   <artifactId>hbase-assembly</artifactId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba586f/hbase-checkstyle/pom.xml
--
diff --git a/hbase-checkstyle/pom.xml b/hbase-checkstyle/pom.xml
index bf1a49c..6c09b64 100644
--- a/hbase-checkstyle/pom.xml
+++ b/hbase-checkstyle/pom.xml
@@ -24,14 +24,14 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hbase</groupId>
   <artifactId>hbase-checkstyle</artifactId>
-  <version>1.1.9</version>
+  <version>1.1.10-SNAPSHOT</version>
   <name>Apache HBase - Checkstyle</name>
   <description>Module to hold Checkstyle properties for HBase.</description>
 
   <parent>
     <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
-    <version>1.1.9</version>
+    <version>1.1.10-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba586f/hbase-client/pom.xml
--
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index b098f13..6b3bc44 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
-    <version>1.1.9</version>
+    <version>1.1.10-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba586f/hbase-common/pom.xml
--
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 666ed04..4b16b99 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
-    <version>1.1.9</version>
+    <version>1.1.10-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba586f/hbase-examples/pom.xml
--
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index f51a38b..c0907ac 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
-    <version>1.1.9</version>
+    <version>1.1.10-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
   <artifactId>hbase-examples</artifactId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba586f/hbase-hadoop-compat/pom.xml
--
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index 1910ccf..5c122af 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
-    <version>1.1.9</version>
+    <version>1.1.10-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ba58

svn commit: r18498 - /dev/hbase/hbase-1.1.9RC0/ /release/hbase/1.1.9/

2017-02-27 Thread ndimiduk
Author: ndimiduk
Date: Tue Feb 28 06:24:09 2017
New Revision: 18498

Log:
publish HBase 1.1.9

Added:
release/hbase/1.1.9/
  - copied from r18497, dev/hbase/hbase-1.1.9RC0/
Removed:
dev/hbase/hbase-1.1.9RC0/



svn commit: r18499 - /release/hbase/1.1.8/

2017-02-27 Thread ndimiduk
Author: ndimiduk
Date: Tue Feb 28 06:24:56 2017
New Revision: 18499

Log:
drop old hbase 1.1.8 release

Removed:
release/hbase/1.1.8/