HBASE-16303 FilterList with MUST_PASS_ONE optimization (Ram)

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/19c609fa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/19c609fa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/19c609fa

Branch: refs/heads/hbase-12439
Commit: 19c609fa5e2624e537a445d9204f06e56ae782c2
Parents: 5e23b3a
Author: Ramkrishna <ramkrishna.s.vasude...@intel.com>
Authored: Fri Aug 5 10:58:02 2016 +0530
Committer: Ramkrishna <ramkrishna.s.vasude...@intel.com>
Committed: Fri Aug 5 10:58:02 2016 +0530

----------------------------------------------------------------------
 .../test/java/org/apache/hadoop/hbase/filter/TestFilter.java    | 5 +++++
 .../java/org/apache/hadoop/hbase/filter/TestFilterList.java     | 3 +++
 2 files changed, 8 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/19c609fa/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 1dd6616..4b8da96 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -315,6 +315,7 @@ public class TestFilter {
     verifyScan(s, expectedRows, expectedKeys);
   }
 
+  @Test
   public void testPrefixFilterWithReverseScan() throws Exception {
     // Grab rows from group one (half of total)
     long expectedRows = this.numRows / 2;
@@ -412,6 +413,7 @@ public class TestFilter {
 
   }
 
+  @Test
   public void testPageFilterWithReverseScan() throws Exception {
     // KVs in first 6 rows
     KeyValue[] expectedKVs = {
@@ -491,6 +493,7 @@ public class TestFilter {
     verifyScan(s, expectedRows, expectedKeys);
   }
 
+  @Test
   public void testWhileMatchFilterWithFilterRowWithReverseScan()
       throws Exception {
     final int pageSize = 4;
@@ -520,6 +523,7 @@ public class TestFilter {
         pageSize, scannerCounter);
   }
 
+  @Test
   public void testWhileMatchFilterWithFilterRowKeyWithReverseScan()
       throws Exception {
     Scan s = new Scan();
@@ -1766,6 +1770,7 @@ public class TestFilter {
         kvs.length, idx);
   }
 
+  @Test
   public void testColumnPaginationFilterColumnOffset() throws Exception {
     KeyValue [] expectedKVs = {
       // testRowOne-0

http://git-wip-us.apache.org/repos/asf/hbase/blob/19c609fa/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 440c9f5..1211e39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -262,6 +262,7 @@ public class TestFilterList {
    * we expect to get the same result as the 'prefix' only result.
    * @throws Exception
    */
+  @Test
   public void testFilterListTwoFiltersMustPassOne() throws Exception {
     byte[] r1 = Bytes.toBytes("Row1");
     byte[] r11 = Bytes.toBytes("Row11");
@@ -294,6 +295,7 @@ public class TestFilterList {
    * we expect to get the same result as the inclusive stop result.
    * @throws Exception
    */
+  @Test
   public void testFilterListWithInclusiveStopFilteMustPassOne() throws Exception {
     byte[] r1 = Bytes.toBytes("Row1");
     byte[] r11 = Bytes.toBytes("Row11");
@@ -353,6 +355,7 @@ public class TestFilterList {
    * Test filterKeyValue logic.
    * @throws Exception
    */
+  @Test
   public void testFilterKeyValue() throws Exception {
     Filter includeFilter = new FilterBase() {
       @Override

Reply via email to