[13/21] hbase git commit: HBASE-18879 HBase FilterList cause KeyOnlyFilter not work

2017-10-25 Thread zhangduo
HBASE-18879 HBase FilterList cause KeyOnlyFilter not work


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6a3ea5b5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6a3ea5b5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6a3ea5b5

Branch: refs/heads/HBASE-18410
Commit: 6a3ea5b5da2d587b83418f377f0e24d93d975edc
Parents: aede28d
Author: huzheng 
Authored: Wed Oct 11 21:17:03 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  6 +++
 .../hadoop/hbase/filter/FilterListBase.java |  3 ++
 .../hadoop/hbase/filter/FilterListWithAND.java  | 22 +
 .../hadoop/hbase/filter/FilterListWithOR.java   | 22 +
 .../hadoop/hbase/filter/TestFilterList.java | 48 
 5 files changed, 85 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6a3ea5b5/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 97392d1..e87f1b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -72,6 +72,8 @@ final public class FilterList extends FilterBase {
   filterListBase = new FilterListWithAND(filters);
 } else if (operator == Operator.MUST_PASS_ONE) {
   filterListBase = new FilterListWithOR(filters);
+} else {
+  throw new IllegalArgumentException("Invalid operator: " + operator);
 }
 this.operator = operator;
   }
@@ -168,6 +170,10 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
+return this.filterListBase.internalFilterKeyValue(c, 
currentTransformedCell);
+  }
+
   @Override
   public ReturnCode filterKeyValue(Cell c) throws IOException {
 return filterListBase.filterKeyValue(c);

http://git-wip-us.apache.org/repos/asf/hbase/blob/6a3ea5b5/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 7fa0245..60b0dc1 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,6 +107,9 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
+  abstract ReturnCode internalFilterKeyValue(Cell c, Cell 
currentTransformedCell)
+  throws IOException;
+
   /**
* Filters that never filter by modifying the returned List of Cells can 
inherit this
* implementation that does nothing. {@inheritDoc}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6a3ea5b5/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
index fa979c0..4909dfd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
@@ -147,16 +147,26 @@ public class FilterListWithAND extends FilterListBase {
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);
   }
 
-  private ReturnCode filterKeyValueWithMustPassAll(Cell c) throws IOException {
+  @Override
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
+if (isEmpty()) {
+  return ReturnCode.INCLUDE;
+}
 ReturnCode rc = ReturnCode.INCLUDE;
-Cell transformed = c;
+Cell transformed = currentTransformedCell;
+this.referenceCell = c;
 this.seekHintFilter.clear();
 for (int i = 0, n = filters.size(); i < n; i++) {
   Filter filter = filters.get(i);
   if (filter.filterAllRemaining()) {
 return ReturnCode.NEXT_ROW;
   }
-  ReturnCode localRC = filter.filterKeyValue(c);
+  ReturnCode localRC;
+  if (filter instanceof FilterList) {
+localRC = ((FilterList) filter).internalFilterKeyValue(c, transformed);
+  } else {
+localRC = filter.filterKeyValue(c

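For context, a minimal usage sketch of the scenario this fix targets (the column layout and the "meta_" qualifier prefix are made up): KeyOnlyFilter works by transforming each included cell into a key-only cell, so a FilterList must keep handing the already-transformed cell to the remaining sub-filters, which is what the internalFilterKeyValue(Cell, Cell) plumbing above provides.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyOnlyScanExample {
  // Returns a Scan whose results should carry row/column keys only (empty values);
  // the qualifier prefix "meta_" is invented for the example.
  static Scan keyOnlyScan() {
    Scan scan = new Scan();
    scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new ColumnPrefixFilter(Bytes.toBytes("meta_")),
        new KeyOnlyFilter()));
    return scan;
  }
}
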
[09/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

Change-Id: Ie3a688b789104df7feaf34ac9fb326a79d6a3960


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3969b853
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3969b853
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3969b853

Branch: refs/heads/HBASE-18410
Commit: 3969b853b272c9d898024b3e86308c964e6fe6d0
Parents: 456057e
Author: Apekshit Sharma 
Authored: Sat Oct 21 18:12:07 2017 -0700
Committer: Apekshit Sharma 
Committed: Mon Oct 23 22:52:24 2017 -0700

--
 hbase-common/pom.xml|4 +
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  351 +
 .../hadoop/hbase/util/JSONMetricUtil.java   |  214 +++
 hbase-endpoint/pom.xml  |6 +
 hbase-http/pom.xml  |  515 +++
 .../hbase/http/AdminAuthorizedServlet.java  |   49 +
 .../http/ClickjackingPreventionFilter.java  |   55 +
 .../hadoop/hbase/http/FilterContainer.java  |   41 +
 .../hadoop/hbase/http/FilterInitializer.java|   32 +
 .../apache/hadoop/hbase/http/HtmlQuoting.java   |  215 +++
 .../apache/hadoop/hbase/http/HttpConfig.java|   80 +
 .../hadoop/hbase/http/HttpRequestLog.java   |   93 ++
 .../hbase/http/HttpRequestLogAppender.java  |   63 +
 .../apache/hadoop/hbase/http/HttpServer.java| 1380 ++
 .../hadoop/hbase/http/HttpServerUtil.java   |   52 +
 .../apache/hadoop/hbase/http/InfoServer.java|  112 ++
 .../apache/hadoop/hbase/http/NoCacheFilter.java |   56 +
 .../hbase/http/ServerConfigurationKeys.java |   47 +
 .../hadoop/hbase/http/conf/ConfServlet.java |  107 ++
 .../hadoop/hbase/http/jmx/JMXJsonServlet.java   |  240 +++
 .../hadoop/hbase/http/jmx/package-info.java |   26 +
 .../hbase/http/lib/StaticUserWebFilter.java |  155 ++
 .../hadoop/hbase/http/lib/package-info.java |   38 +
 .../apache/hadoop/hbase/http/log/LogLevel.java  |  175 +++
 .../apache/hadoop/hbase/http/package-info.java  |   27 +
 .../hbase/http/HttpServerFunctionalTest.java|  272 
 .../hadoop/hbase/http/TestGlobalFilter.java |  151 ++
 .../hadoop/hbase/http/TestHtmlQuoting.java  |   94 ++
 .../hadoop/hbase/http/TestHttpRequestLog.java   |   52 +
 .../hbase/http/TestHttpRequestLogAppender.java  |   41 +
 .../hadoop/hbase/http/TestHttpServer.java   |  621 
 .../hbase/http/TestHttpServerLifecycle.java |  135 ++
 .../hbase/http/TestHttpServerWebapps.java   |   68 +
 .../hadoop/hbase/http/TestPathFilter.java   |  155 ++
 .../hadoop/hbase/http/TestSSLHttpServer.java|  124 ++
 .../hadoop/hbase/http/TestServletFilter.java|  217 +++
 .../hadoop/hbase/http/TestSpnegoHttpServer.java |  258 
 .../hadoop/hbase/http/conf/TestConfServlet.java |  116 ++
 .../hbase/http/jmx/TestJMXJsonServlet.java  |  134 ++
 .../hbase/http/lib/TestStaticUserWebFilter.java |   86 ++
 .../hadoop/hbase/http/log/TestLogLevel.java |   92 ++
 .../hbase/http/resource/JerseyResource.java |   64 +
 .../hadoop/hbase/http/ssl/KeyStoreTestUtil.java |  342 +
 hbase-http/src/test/resources/log4j.properties  |   68 +
 .../src/test/resources/webapps/static/test.css  |   21 +
 .../src/test/resources/webapps/test/testjsp.jsp |   21 +
 .../apache/hadoop/hbase/rest/RESTServer.java|4 +-
 .../hbase/rest/HBaseRESTTestingUtility.java |4 +-
 hbase-server/pom.xml|   45 +-
 .../hbase/http/AdminAuthorizedServlet.java  |   49 -
 .../http/ClickjackingPreventionFilter.java  |   55 -
 .../hadoop/hbase/http/FilterContainer.java  |   41 -
 .../hadoop/hbase/http/FilterInitializer.java|   32 -
 .../apache/hadoop/hbase/http/HtmlQuoting.java   |  215 ---
 .../apache/hadoop/hbase/http/HttpConfig.java|   80 -
 .../hadoop/hbase/http/HttpRequestLog.java   |   93 --
 .../hbase/http/HttpRequestLogAppender.java  |   63 -
 .../apache/hadoop/hbase/http/HttpServer.java| 1380 --
 .../apache/hadoop/hbase/http/InfoServer.java|  112 --
 .../apache/hadoop/hbase/http/NoCacheFilter.java |   56 -
 .../hbase/http/ServerConfigurationKeys.java |   47 -
 .../hadoop/hbase/http/conf/ConfServlet.java |  107 --
 .../hadoop/hbase/http/jmx/JMXJsonServlet.java   |  240 ---
 .../hadoop/hbase/http/jmx/package-info.java |   26 -
 .../hbase/http/lib/StaticUserWebFilter.java |  155 --
 .../hadoop/hbase/http/lib/package-info.java |   38 -
 .../apache/hadoop/hbase/http/log/LogLevel.java  |  175 ---
 .../apache/hadoop/hbase/http/package-info.java  |   27 -
 .../regionserver/DumpRegionServerMetrics.java   |   60 +
 .../hbase/regionserver/HRegionServer.java   |3 +-
 .../hadoop/hbase/util/HttpServerUtil.java   |   52 -
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  387 -
 .../hadoop/hbase/util/JS

[12/21] hbase git commit: HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT

2017-10-25 Thread zhangduo
HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL 
FilterList return SEEK_NEXT_USING_HINT


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/57c1a844
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/57c1a844
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/57c1a844

Branch: refs/heads/HBASE-18410
Commit: 57c1a84449c160f5f3de7e3acb32a708aa5a32d9
Parents: 772f672
Author: tedyu 
Authored: Thu Sep 7 04:07:09 2017 -0700
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../main/java/org/apache/hadoop/hbase/filter/FilterList.java| 5 +++--
 .../java/org/apache/hadoop/hbase/filter/TestFilterList.java | 4 ++--
 2 files changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/57c1a844/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 83db1f2..3ff978d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -28,12 +28,13 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters

http://git-wip-us.apache.org/repos/asf/hbase/blob/57c1a844/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 46d44de..e414729 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -502,8 +502,8 @@ public class TestFilterList {
 // Should take the min if given two hints
 FilterList filterList = new FilterList(Operator.MUST_PASS_ONE,
 Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } ));
-assertEquals(0,
-  CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), 
minKeyValue));
+assertEquals(0, 
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null),
+  minKeyValue));
 
 // Should have no hint if any filter has no hint
 filterList = new FilterList(Operator.MUST_PASS_ONE,


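For orientation, an illustrative (invented) filter that always answers SEEK_NEXT_USING_HINT with a fixed row hint. Two such instances with different rows, wrapped in a MUST_PASS_ALL FilterList, let the list seek straight to the larger of the two hints, which is what this commit enables; in a MUST_PASS_ONE list the smaller hint must win, as the test above asserts.

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

public class FixedHintFilter extends FilterBase {
  private final byte[] row;

  public FixedHintFilter(String row) {
    this.row = Bytes.toBytes(row);
  }

  @Override
  public ReturnCode filterKeyValue(Cell c) throws IOException {
    // Always ask the scanner to jump to the hinted row.
    return ReturnCode.SEEK_NEXT_USING_HINT;
  }

  @Override
  public Cell getNextCellHint(Cell c) throws IOException {
    return KeyValueUtil.createFirstOnRow(row);
  }
}
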

[16/21] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8a2d3943
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8a2d3943
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8a2d3943

Branch: refs/heads/HBASE-18410
Commit: 8a2d39430a5702c723b870458132676f2775e36c
Parents: 086fadf
Author: huzheng 
Authored: Sat May 27 16:58:00 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  74 +-
 .../hadoop/hbase/filter/TestFilterList.java | 136 +--
 2 files changed, 200 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8a2d3943/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 2f11472..3493082 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -67,6 +67,14 @@ final public class FilterList extends FilterBase {
  private final List<Filter> filters;
  private Collection<Filter> seekHintFilters = new ArrayList<>();
 
+  /**
+   * Save previous return code and previous cell for every filter in filter 
list. For MUST_PASS_ONE,
+   * we use the previous return code to decide whether we should pass current 
cell encountered to
+   * the filter. For MUST_PASS_ALL, the two lists are meaningless.
+   */
+  private List<ReturnCode> prevFilterRCList = null;
+  private List<Cell> prevCellList = null;
+
   /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
   private Cell referenceCell = null;
 
@@ -88,6 +96,7 @@ final public class FilterList extends FilterBase {
  public FilterList(final List<Filter> rowFilters) {
 reversed = getReversed(rowFilters, reversed);
 this.filters = new ArrayList<>(rowFilters);
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -107,6 +116,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
 this.operator = operator;
 this.filters = new ArrayList<>();
+initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -118,6 +128,7 @@ final public class FilterList extends FilterBase {
  public FilterList(final Operator operator, final List<Filter> rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -129,8 +140,21 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.length);
+  }
+
+  public void initPrevListForMustPassOne(int size) {
+if (operator == Operator.MUST_PASS_ONE) {
+  if (this.prevCellList == null) {
+prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+  if (this.prevCellList == null) {
+prevCellList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+}
   }
 
+
   /**
* Get the operator.
*
@@ -185,6 +209,10 @@ final public class FilterList extends FilterBase {
  public void addFilter(List<Filter> filters) {
 checkReversed(filters, isReversed());
 this.filters.addAll(filters);
+if (operator == Operator.MUST_PASS_ONE) {
+  this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
+  this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
+}
   }
 
   /**
@@ -201,6 +229,10 @@ final public class FilterList extends FilterBase {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   filters.get(i).reset();
+  if (operator == Operator.MUST_PASS_ONE) {
+prevFilterRCList.set(i, null);
+prevCellList.set(i, null);
+  }
 }
 seekHintFilters.clear();
   }
@@ -283,6 +315,41 @@ final public class FilterList extends FilterBase {
 return this.transformedCell;
   }
 
+  /**
+   * For MUST_PASS_ONE, we cannot make sure that when filter-A in filter list 
return NEXT_COL then
+   * the next cell passing to filterList will be the first cell in next 
column, because if filter-B
+   * in filter list return SKIP, then the filter list will return SKIP. In 
this case, we should pass
+   * the cell following the previous cell, and it's possible that the next 
cell has the same column
+   * as the previous cell even if filter-A has NEXT_COL returned for the 
previous cell. So we should
+   * sa

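A simplified, non-authoritative sketch of the bookkeeping idea introduced here (the class and method below are invented for illustration; the real logic lives in FilterList.filterKeyValue and handles more return codes): a MUST_PASS_ONE sub-filter is only re-consulted when the current cell is genuinely "new" from that sub-filter's point of view.

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;

final class MustPassOneSketch {
  ReturnCode consultSubFilter(int i, Filter f, Cell current,
      List<ReturnCode> prevRC, List<Cell> prevCell) throws IOException {
    ReturnCode prev = prevRC.get(i);
    Cell seen = prevCell.get(i);
    if (prev == ReturnCode.NEXT_COL && seen != null
        && CellUtil.matchingRowColumn(seen, current)) {
      // The filter already asked to move past this column; repeat its previous
      // answer instead of letting it re-include cells it wanted to skip.
      return prev;
    }
    ReturnCode rc = f.filterKeyValue(current);
    prevRC.set(i, rc);
    prevCell.set(i, current);
    return rc;
  }
}
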
[19/21] hbase git commit: HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not work

2017-10-25 Thread zhangduo
HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not 
work

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6bf4ea72
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6bf4ea72
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6bf4ea72

Branch: refs/heads/HBASE-18410
Commit: 6bf4ea722760c067923edecce729890e1025d1e5
Parents: 6a3ea5b
Author: huzheng 
Authored: Tue Oct 17 19:25:23 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../org/apache/hadoop/hbase/filter/Filter.java  | 10 +---
 .../hadoop/hbase/filter/FilterListWithOR.java   | 10 ++--
 .../hadoop/hbase/filter/TestFilterList.java | 26 
 .../hbase/filter/TestFilterListOnMini.java  |  7 +++---
 4 files changed, 44 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6bf4ea72/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 70c68b6..a92ea0b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -172,8 +172,12 @@ public abstract class Filter {
  */
 NEXT_COL,
 /**
- * Done with columns, skip to next row. Note that filterRow() will
- * still be called.
+ * Seek to next row in current family. It may still pass a cell whose 
family is different but
+ * row is the same as previous cell to {@link #filterKeyValue(Cell)} , 
even if we get a NEXT_ROW
+ * returned for previous cell. For more details see HBASE-18368. 
+ * Once reset() method was invoked, then we switch to the next row for all 
family, and you can
+ * catch the event by invoking CellUtils.matchingRows(previousCell, 
currentCell). 
+ * Note that filterRow() will still be called. 
  */
 NEXT_ROW,
 /**
@@ -181,7 +185,7 @@ public abstract class Filter {
  */
 SEEK_NEXT_USING_HINT,
 /**
- * Include KeyValue and done with row, seek to next.
+ * Include KeyValue and done with row, seek to next. See NEXT_ROW.
  */
 INCLUDE_AND_SEEK_NEXT_ROW,
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6bf4ea72/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
index bac9023..31e2a55 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
@@ -74,7 +74,12 @@ public class FilterListWithOR extends FilterListBase {
* as the previous cell even if filter-A has NEXT_COL returned for the 
previous cell. So we should
* save the previous cell and the return code list when checking previous 
cell for every filter in
* filter list, and verify if currentCell fit the previous return code, if 
fit then pass the
-   * currentCell to the corresponding filter. (HBASE-17678)
+   * currentCell to the corresponding filter. (HBASE-17678) 
+   * Note that: In StoreScanner level, NEXT_ROW will skip to the next row in 
current family, and in
+   * RegionScanner level, NEXT_ROW will skip to the next row in current family 
and switch to the
+   * next family for RegionScanner, INCLUDE_AND_NEXT_ROW is the same. so we 
should pass current cell
+   * to the filter, if row mismatch or row match but column family mismatch. 
(HBASE-18368)
+   * @see org.apache.hadoop.hbase.filter.Filter.ReturnCode
*/
   private boolean shouldPassCurrentCellToFilter(Cell prevCell, Cell 
currentCell, int filterIdx)
   throws IOException {
@@ -94,7 +99,8 @@ public class FilterListWithOR extends FilterListBase {
   return !CellUtil.matchingRowColumn(prevCell, currentCell);
 case NEXT_ROW:
 case INCLUDE_AND_SEEK_NEXT_ROW:
-  return !CellUtil.matchingRows(prevCell, currentCell);
+  return !CellUtil.matchingRows(prevCell, currentCell)
+  || !CellUtil.matchingFamily(prevCell, currentCell);
 default:
   throw new IllegalStateException("Received code is not valid.");
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6bf4ea72/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java

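For orientation, a hedged sketch of the failing shape described by the JIRA title, with made-up family names "a" and "b": OR-ing two FamilyFilters should return cells from either family, and the NEXT_ROW handling above is what keeps the second family's cells flowing to the sub-filters within the same row.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyOrScanExample {
  static Scan scanTwoFamilies() {
    Scan scan = new Scan();
    scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("a"))),
        new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("b")))));
    return scan;
  }
}
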
[05/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/3969b853/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
deleted file mode 100644
index 726595b..000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ /dev/null
@@ -1,1380 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.io.PrintStream;
-import java.net.BindException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.http.conf.ConfServlet;
-import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
-import org.apache.hadoop.hbase.http.log.LogLevel;
-import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.Shell;
-
-import org.eclipse.jetty.http.HttpVersion;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.Handler;
-import org.eclipse.jetty.server.HttpConfiguration;
-import org.eclipse.jetty.server.HttpConnectionFactory;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.server.SecureRequestCustomizer;
-import org.eclipse.jetty.server.SslConnectionFactory;
-import org.eclipse.jetty.server.handler.ContextHandlerCollection;
-import org.eclipse.jetty.server.handler.HandlerCollection;
-import org.eclipse.jetty.server.RequestLog;
-import org.eclipse.jetty.server.handler.RequestLogHandler;
-import org.eclipse.jetty.servlet.FilterMapping;
-import org.eclipse.jetty.servlet.ServletHandler;
-import org.eclipse.jetty.servlet.FilterHolder;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.DefaultServlet;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.util.MultiException;
-import org.eclipse.jetty.util.ssl.SslContextFactory;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
-import org.eclipse.jetty.webapp.WebAppContext;
-
-import org.glassfish.jersey.server.ResourceConfig;
-import org.glassfish.jersey.servlet.ServletContainer;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
-/**
- * Create a Jetty embedded server to answer http requests. The primary goal
- * is to serve up status information for the server.
- * There are three contexts:
- *   "/logs/" -> points to the log directory
- *   "/static/" -> points to common static files (src/webapps/static)
- *   "/" -> the jsp server code from (src/webapps/)
- */
-@Int

[21/21] hbase git commit: HBASE-18411 Dividing FiterList into two separate sub-classes: FilterListWithOR , FilterListWithAND

2017-10-25 Thread zhangduo
HBASE-18411 Dividing FiterList into two separate sub-classes: FilterListWithOR 
, FilterListWithAND

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/aede28d7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/aede28d7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/aede28d7

Branch: refs/heads/HBASE-18410
Commit: aede28d7861bcb2b2731d8947acc259b0acbd186
Parents: e75c78c
Author: huzheng 
Authored: Tue Oct 10 20:01:48 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 661 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 159 +
 .../hadoop/hbase/filter/FilterListWithAND.java  | 273 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 383 +++
 .../hadoop/hbase/filter/TestFilterList.java |  89 +++
 5 files changed, 962 insertions(+), 603 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/aede28d7/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index b518645..97392d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -37,86 +38,60 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
 
 /**
- * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator {@link 
Operator#MUST_PASS_ALL}
- * (AND) or {@link Operator#MUST_PASS_ONE} (OR).
- * Since you can use Filter Lists as children of Filter Lists, you can create a
- * hierarchy of filters to be evaluated.
- *
- * 
- * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does
- * not include the KeyValue.
- *
- * 
- * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated.
- *
- * 
+ * Implementation of {@link Filter} that represents an ordered List of Filters 
which will be
+ * evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} 
(AND) or
+ * {@link Operator#MUST_PASS_ONE} (OR). Since you can use Filter 
Lists as children of
+ * Filter Lists, you can create a hierarchy of filters to be evaluated. 
+ * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does not
+ * include the KeyValue. 
+ * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated. 
  * Defaults to {@link Operator#MUST_PASS_ALL}.
  */
 @InterfaceAudience.Public
 final public class FilterList extends FilterBase {
+
   /** set operator */
   @InterfaceAudience.Public
-  public static enum Operator {
+  public enum Operator {
 /** !AND */
 MUST_PASS_ALL,
 /** !OR */
 MUST_PASS_ONE
   }
 
-  private static final int MAX_LOG_FILTERS = 5;
-  private Operator operator = Operator.MUST_PASS_ALL;
-  private final List<Filter> filters;
-  private Collection<Filter> seekHintFilters = new ArrayList<>();
-
-  /**
-   * Save previous return code and previous cell for every filter in filter 
list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current 
cell encountered to
-   * the filter. For MUST_PASS_ALL, the two lists are meaningless.
-   */
-  private List<ReturnCode> prevFilterRCList = null;
-  private List<Cell> prevCellList = null;
-
-  /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
-  private Cell referenceCell = null;
-
-  /**
-   * When filtering a given Cell in {@link #filterKeyValue(Cell)},
-   * this stores the transformed Cell to be returned by {@link 
#transformCell(Cell)}.
-   *
-   * Individual filters transformation are applied only when the filter 
includes the Cell.
-   * Transformations are composed in the order specified by {@link #filters}.
-   */
-  private Cell transformedCell = null;
+  private Operator operator;
+  private FilterListBase filterListBase;
 
   /**
* Constructor that takes a set of {@link Filter}s and an operator.
* @param operator Operator to process filter set with.
-   * @p

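The public FilterList API is unchanged by this split; a brief, hypothetical construction fragment for orientation (the choice of child filters is arbitrary). The operator now merely selects which implementation, FilterListWithAND or FilterListWithOR, does the work internally.

FilterList and = new FilterList(FilterList.Operator.MUST_PASS_ALL,
    new FirstKeyOnlyFilter(), new KeyOnlyFilter());
FilterList or = new FilterList(FilterList.Operator.MUST_PASS_ONE,
    new FirstKeyOnlyFilter(), new KeyOnlyFilter());
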
[07/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/3969b853/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
--
diff --git 
a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java 
b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
new file mode 100644
index 000..e23eecd
--- /dev/null
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.log;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.regex.Pattern;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Jdk14Logger;
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.util.ServletUtil;
+
+/**
+ * Change log level in runtime.
+ */
+@InterfaceStability.Evolving
+public class LogLevel {
+  public static final String USAGES = "\nUsage: General options are:\n"
+  + "\t[-getlevel  ]\n"
+  + "\t[-setlevel   ]\n";
+
+  /**
+   * A command line implementation
+   */
+  public static void main(String[] args) {
+if (args.length == 3 && "-getlevel".equals(args[0])) {
+  process("http://"; + args[1] + "/logLevel?log=" + args[2]);
+  return;
+}
+else if (args.length == 4 && "-setlevel".equals(args[0])) {
+  process("http://"; + args[1] + "/logLevel?log=" + args[2]
+  + "&level=" + args[3]);
+  return;
+}
+
+System.err.println(USAGES);
+System.exit(-1);
+  }
+
+  private static void process(String urlstring) {
+try {
+  URL url = new URL(urlstring);
+  System.out.println("Connecting to " + url);
+  URLConnection connection = url.openConnection();
+  connection.connect();
+  try (InputStreamReader streamReader = new 
InputStreamReader(connection.getInputStream());
+   BufferedReader bufferedReader = new BufferedReader(streamReader)) {
+for(String line; (line = bufferedReader.readLine()) != null; ) {
+  if (line.startsWith(MARKER)) {
+System.out.println(TAG.matcher(line).replaceAll(""));
+  }
+}
+  }
+} catch (IOException ioe) {
+  System.err.println("" + ioe);
+}
+  }
+
+  static final String MARKER = "";
+  static final Pattern TAG = Pattern.compile("<[^>]*>");
+
+  /**
+   * A servlet implementation
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+  @InterfaceStability.Unstable
+  public static class Servlet extends HttpServlet {
+private static final long serialVersionUID = 1L;
+
+@Override
+public void doGet(HttpServletRequest request, HttpServletResponse response
+) throws ServletException, IOException {
+
+  // Do the authorization
+  if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
+  response)) {
+return;
+  }
+
+  PrintWriter out = ServletUtil.initHTML(response, "Log Level");
+  String logName = ServletUtil.getParameter(request, "log");
+  String level = ServletUtil.getParameter(request, "level");
+
+  if (logName != null) {
+out.println("Results");
+out.println(MARKER
++ "Submitted Log Name: " + logName + "");
+
+Log log = LogFactory.getLog(logName);
+out.println(MARKER
++ "Log Class: " + log.getClass().getName() +"");
+if (level != null) {
+  out.println(MARKER + "Submitted Level: " + level + "");
+}
+
+if (log instanceof Log4JLogger) {
+  process(((Log4JLogger)log).getLogger(), level, out);
+}
+else if (

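Both subcommands simply issue an HTTP GET against the /logLevel servlet, as the process(...) helper above shows. A hedged programmatic invocation for illustration (host, info port and logger name are placeholders):

import org.apache.hadoop.hbase.http.log.LogLevel;

// Equivalent to the CLI form: -setlevel <host:httpPort> <name> <level>
LogLevel.main(new String[] {
    "-setlevel", "rs-host.example.org:16030", "org.apache.hadoop.hbase", "DEBUG" });
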
[03/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/3969b853/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
deleted file mode 100644
index e9a56ff..000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ /dev/null
@@ -1,621 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.HttpURLConnection;
-import java.net.URI;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Executors;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import 
org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
-import org.apache.hadoop.hbase.http.resource.JerseyResource;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.Groups;
-import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.util.ajax.JSON;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-import org.mockito.internal.util.reflection.Whitebox;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServer extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestHttpServer.class);
-  private static HttpServer server;
-  private static URL baseUrl;
-  // jetty 9.4.x needs this many threads to start, even in the small.
-  static final int MAX_THREADS = 16;
-  
-  @SuppressWarnings("serial")
-  public static class EchoMapServlet extends HttpServlet {
-@Override
-public void doGet(HttpServletRequest request, 
-  HttpServletResponse response
-  ) throws ServletException, IOException {
-  PrintWriter out = response.getWriter();
-  Map<String, String[]> params = request.getParameterMap();
-  SortedSet<String> keys = new TreeSet<>(params.keySet());
-  for(String key: keys) {
-out.print(key);
-out.print(':');
-String[] values = params.get(key);
-if (values.length > 0) {
-  out.print(values[0]);
-  for(int i=1; i < values.length; ++i) {
-out.print(',');
-out.print(values[i]);
-  }
-}
-out.print('\n');
-  }
-  out.close();
-}
-  }
-
-  @SuppressWarnings("serial")
-  public static class EchoServlet extends HttpServlet {
-@Override
-public void doGet(HttpServletRequest request, 
-  HttpServletResponse response
-  ) throws ServletException, IOException {
-  PrintWriter out = response.getWriter();
-  SortedSet<String> sortedKeys = new TreeSet<>();
-  Enumeration keys = request.getParameter

[01/21] hbase git commit: HBASE-18846 Accommodate the hbase-indexer/lily/SEP consumer deploy-type [Forced Update!]

2017-10-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18410 806bcfa9f -> d370fac77 (forced update)


HBASE-18846 Accommodate the hbase-indexer/lily/SEP consumer deploy-type

Patch to start a standalone RegionServer that registers itself and
optionally stands up Services. Can work w/o a Master in the mix.
Useful for testing. Also can be used by hbase-indexer to put up a
Replication sink that extends public-facing APIs w/o need to extend
internals. See JIRA release note for detail.

This patch adds booleans for whether to start Admin and Client Service.
Other refactoring moves all thread and service start into the one fat
location so we can ask to by-pass 'services' if we don't need them.
See JIRA for an example hbase-server.xml that has config to shutdown
WAL, cache, etc.

Adds checks if a service/thread has been setup before going to use it.

Renames the ExecutorService in HRegionServer from service to
executorService.

See JIRA too for example Connection implementation that makes use of
Connection plugin point to receive a replication stream. The default
replication sink catches the incoming replication stream, undoes the
WALEdits and then creates a Table to call a batch with the
edits; up on JIRA, an example Connection plugin (legit, supported)
returns a Table with an overridden batch method where in we do index
inserts returning appropriate results to keep the replication engine
ticking over.

Upsides: an unadulterated RegionServer that will keep replication metrics
and even hosts a web UI if wanted. No hacks. Just ordained configs
shutting down unused services. Injection of the indexing function at a
blessed point with no pollution by hbase internals; only public imports.
No user of Private nor LimitedPrivate classes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/456057ef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/456057ef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/456057ef

Branch: refs/heads/HBASE-18410
Commit: 456057ef90f152315a7f244141f3fca4ff748336
Parents: 37b29e9
Author: Michael Stack 
Authored: Tue Sep 26 22:27:58 2017 -0700
Committer: Michael Stack 
Committed: Mon Oct 23 21:16:13 2017 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java | 114 +--
 .../hbase/regionserver/HRegionServer.java   | 705 ++-
 .../hbase/regionserver/RSRpcServices.java   |  70 +-
 .../regionserver/TestRegionServerNoMaster.java  |   2 +-
 4 files changed, 489 insertions(+), 402 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/456057ef/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index cbb1537..8f2ae6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1,4 +1,4 @@
-/**
+/*
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -99,7 +99,6 @@ import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
 import org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
 import org.apache.hadoop.hbase.master.assignment.RegionStates;
 import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
-import org.apache.hadoop.hbase.master.assignment.RegionStates.ServerStateNode;
 import org.apache.hadoop.hbase.master.balancer.BalancerChore;
 import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
 import org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
@@ -472,66 +471,73 @@ public class HMaster extends HRegionServer implements 
MasterServices {
   public HMaster(final Configuration conf, CoordinatedStateManager csm)
   throws IOException, KeeperException {
 super(conf, csm);
-this.rsFatals = new MemoryBoundedLogMessageBuffer(
-  conf.getLong("hbase.master.buffer.for.rs.fatals", 1*1024*1024));
+try {
+  this.rsFatals = new MemoryBoundedLogMessageBuffer(
+  conf.getLong("hbase.master.buffer.for.rs.fatals", 1 * 1024 * 1024));
 
-LOG.info("hbase.rootdir=" + getRootDir() +
-  ", hbase.cluster.distributed=" + 
this.conf.getBoolean(HConstants.CLUSTER_DISTRIBUTED, false));
+  LOG.info("hbase.rootdir=" + getRootDir() +
+  ", hbase.cluster.distributed=" + 
this.conf.getBoolean(HConstants.CLUSTER_DISTRIBUTED, false));
 
-// Disable usage of meta replicas in the master
-this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);
+  // Disable usage of meta replicas in the master
+  this.conf.setBoolean

[10/21] hbase git commit: HBASE-16290 Dump summary of callQueue content; can help debugging

2017-10-25 Thread zhangduo
HBASE-16290 Dump summary of callQueue content; can help debugging

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7cdfbde3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7cdfbde3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7cdfbde3

Branch: refs/heads/HBASE-18410
Commit: 7cdfbde37dae11261b4be12cc086058826a8037a
Parents: 3969b85
Author: Sreeram Venkatasubramanian 
Authored: Tue Oct 24 14:03:32 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Oct 24 14:28:42 2017 +0800

--
 .../apache/hadoop/hbase/ipc/CallQueueInfo.java  |  81 ++
 .../hadoop/hbase/ipc/FifoRpcScheduler.java  |  57 ++-
 .../apache/hadoop/hbase/ipc/RpcExecutor.java|  47 ++
 .../apache/hadoop/hbase/ipc/RpcScheduler.java   |   3 +
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java|  28 
 .../hbase/regionserver/RSDumpServlet.java   |  33 
 .../hbase/ipc/DelegatingRpcScheduler.java   |   5 +
 .../hadoop/hbase/ipc/TestFifoRpcScheduler.java  | 160 +++
 .../hbase/ipc/TestSimpleRpcScheduler.java   |  68 
 9 files changed, 481 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7cdfbde3/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallQueueInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallQueueInfo.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallQueueInfo.java
new file mode 100644
index 000..19a75ea
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallQueueInfo.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.ipc;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+
+@InterfaceAudience.Private
+public class CallQueueInfo {
+  private final Map<String, Map<String, Long>> callQueueMethodCountsSummary;
+  private final Map<String, Map<String, Long>> callQueueMethodSizeSummary;
+
+  CallQueueInfo() {
+callQueueMethodCountsSummary = new HashMap<>();
+callQueueMethodSizeSummary = new HashMap<>();
+  }
+
+  public Set<String> getCallQueueNames() {
+return callQueueMethodCountsSummary.keySet();
+  }
+
+  public Set<String> getCalledMethodNames(String callQueueName) {
+return callQueueMethodCountsSummary.get(callQueueName).keySet();
+  }
+
+  public long getCallMethodCount(String callQueueName, String methodName) {
+long methodCount;
+
+Map<String, Long> methodCountMap = 
callQueueMethodCountsSummary.getOrDefault(callQueueName, null);
+
+if (null != methodCountMap) {
+  methodCount = methodCountMap.getOrDefault(methodName, 0L);
+} else {
+  methodCount = 0L;
+}
+
+return methodCount;
+  }
+
+  void setCallMethodCount(String callQueueName, Map<String, Long> 
methodCountMap) {
+callQueueMethodCountsSummary.put(callQueueName, methodCountMap);
+  }
+
+  public long getCallMethodSize(String callQueueName, String methodName) {
+long methodSize;
+
+Map<String, Long> methodSizeMap = 
callQueueMethodSizeSummary.getOrDefault(callQueueName, null);
+
+if (null != methodSizeMap) {
+  methodSize = methodSizeMap.getOrDefault(methodName, 0L);
+} else {
+  methodSize = 0L;
+}
+
+return methodSize;
+  }
+
+  void setCallMethodSize(String callQueueName, Map<String, Long> 
methodSizeMap) {
+callQueueMethodSizeSummary.put(callQueueName, methodSizeMap);
+  }
+
+}

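A hedged usage sketch for the new class (the printer class below is invented for illustration; only accessors shown in the diff above are used): walk a CallQueueInfo snapshot obtained from the RPC scheduler and print per-queue, per-method call counts and sizes.

import org.apache.hadoop.hbase.ipc.CallQueueInfo;

public class CallQueueSummaryPrinter {
  static void print(CallQueueInfo info) {
    for (String queue : info.getCallQueueNames()) {
      for (String method : info.getCalledMethodNames(queue)) {
        System.out.println(queue + " / " + method
            + ": count=" + info.getCallMethodCount(queue, method)
            + ", size=" + info.getCallMethodSize(queue, method));
      }
    }
  }
}
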
http://git-wip-us.apache.org/repos/asf/hbase/blob/7cdfbde3/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
index 4ebfcd9..679f237 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/

[08/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/3969b853/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
--
diff --git 
a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java 
b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
new file mode 100644
index 000..726595b
--- /dev/null
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -0,0 +1,1380 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.PrintStream;
+import java.net.BindException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.http.conf.ConfServlet;
+import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
+import org.apache.hadoop.hbase.http.log.LogLevel;
+import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.Shell;
+
+import org.eclipse.jetty.http.HttpVersion;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
+import org.eclipse.jetty.server.SslConnectionFactory;
+import org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.server.RequestLog;
+import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.eclipse.jetty.servlet.FilterMapping;
+import org.eclipse.jetty.servlet.ServletHandler;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.MultiException;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.eclipse.jetty.webapp.WebAppContext;
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.servlet.ServletContainer;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+
+/**
+ * Create a Jetty embedded server to answer http requests. The primary goal
+ * is to serve up status information for the server.
+ * There are three contexts:
+ *   "/logs/" -> points to the log directory
+ *   "/static/" -> points to common static files (src/webapps/static)
+ *   "/" -> the jsp server code from (src/webapps/)
+ */
+@InterfaceAudien

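For orientation, a hedged fragment standing the relocated server up, assuming the HttpServer.Builder API (not visible in this excerpt); the webapp name and the ephemeral-port choice are illustrative only.

import java.net.URI;
import org.apache.hadoop.hbase.http.HttpServer;

HttpServer server = new HttpServer.Builder()
    .setName("test")                                  // webapp name
    .addEndpoint(URI.create("http://localhost:0"))    // 0 = pick a free port
    .setFindPort(true)
    .build();
server.start();
// "/logs/", "/static/" and the webapp are now served, per the class javadoc above.
server.stop();
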
[18/21] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - addendum

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - 
addendum

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/772f6729
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/772f6729
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/772f6729

Branch: refs/heads/HBASE-18410
Commit: 772f6729f343fd0a344f96a0fe1503f363e13497
Parents: 8a2d394
Author: huzheng 
Authored: Wed Jun 7 14:49:29 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/FilterList.java | 12 ++--
 1 file changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/772f6729/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3493082..83db1f2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -145,7 +146,7 @@ final public class FilterList extends FilterBase {
 
   public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevCellList == null) {
+  if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
   }
   if (this.prevCellList == null) {
@@ -407,7 +408,14 @@ final public class FilterList extends FilterBase {
 ReturnCode localRC = filter.filterKeyValue(c);
 // Update previous cell and return code we encountered.
 prevFilterRCList.set(i, localRC);
-prevCellList.set(i, c);
+if (c == null || localRC == ReturnCode.INCLUDE || localRC == ReturnCode.SKIP) {
+  // If the previous return code is INCLUDE or SKIP, we always pass the next cell to the
+  // corresponding sub-filter (no need to test shouldPassCurrentCellToFilter()), so we need
+  // not save the current cell to prevCellList, which saves heap memory.
+  prevCellList.set(i, null);
+} else {
+  prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
+}
 
 if (localRC != ReturnCode.SEEK_NEXT_USING_HINT) {
   seenNonHintReturnCode = true;
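
For context, a minimal client-side sketch of the kind of MUST_PASS_ONE scan this addendum
affects; the column prefixes are made-up values and the snippet is illustration only, not part of
the patch.

  import java.util.Arrays;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
  import org.apache.hadoop.hbase.filter.Filter;
  import org.apache.hadoop.hbase.filter.FilterList;
  import org.apache.hadoop.hbase.util.Bytes;

  public class MustPassOneScanSketch {
    public static Scan buildScan() {
      // Either prefix may match, so the list is evaluated with MUST_PASS_ONE (OR).
      Filter a = new ColumnPrefixFilter(Bytes.toBytes("user_"));
      Filter b = new ColumnPrefixFilter(Bytes.toBytes("addr_"));
      FilterList orList = new FilterList(FilterList.Operator.MUST_PASS_ONE, Arrays.asList(a, b));
      return new Scan().setFilter(orList);
    }
  }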



[17/21] hbase git commit: HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

2017-10-25 Thread zhangduo
HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9be96049
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9be96049
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9be96049

Branch: refs/heads/HBASE-18410
Commit: 9be960493e40ef1bd310fc7ff936e08cffc2cf80
Parents: 57c1a84
Author: huzheng 
Authored: Thu Jun 8 15:58:42 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 541 ---
 .../hadoop/hbase/filter/TestFilterList.java | 148 -
 2 files changed, 471 insertions(+), 218 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9be96049/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3ff978d..3147ab0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -90,62 +90,53 @@ final public class FilterList extends FilterBase {
   private Cell transformedCell = null;
 
   /**
-   * Constructor that takes a set of {@link Filter}s. The default operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List rowFilters) {
+reversed = checkAndGetReversed(rowFilters, reversed);
+this.filters = new ArrayList<>(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s. The default operator 
MUST_PASS_ALL is assumed.
* All filters are cloned to internal list.
* @param rowFilters list of filters
*/
   public FilterList(final List rowFilters) {
-reversed = getReversed(rowFilters, reversed);
-this.filters = new ArrayList<>(rowFilters);
-initPrevListForMustPassOne(rowFilters.size());
+this(Operator.MUST_PASS_ALL, rowFilters);
   }
 
   /**
-   * Constructor that takes a var arg number of {@link Filter}s. The fefault 
operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a var arg number of {@link Filter}s. The default 
operator MUST_PASS_ALL
+   * is assumed.
* @param rowFilters
*/
   public FilterList(final Filter... rowFilters) {
-this(Arrays.asList(rowFilters));
+this(Operator.MUST_PASS_ALL, Arrays.asList(rowFilters));
   }
 
   /**
* Constructor that takes an operator.
-   *
* @param operator Operator to process filter set with.
*/
   public FilterList(final Operator operator) {
-this.operator = operator;
-this.filters = new ArrayList<>();
-initPrevListForMustPassOne(filters.size());
-  }
-
-  /**
-   * Constructor that takes a set of {@link Filter}s and an operator.
-   *
-   * @param operator Operator to process filter set with.
-   * @param rowFilters Set of row filters.
-   */
-  public FilterList(final Operator operator, final List rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
+this(operator, new ArrayList<>());
   }
 
   /**
* Constructor that takes a var arg number of {@link Filter}s and an 
operator.
-   *
* @param operator Operator to process filter set with.
* @param rowFilters Filters to use
*/
   public FilterList(final Operator operator, final Filter... rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.length);
+this(operator, Arrays.asList(rowFilters));
   }
 
-  public void initPrevListForMustPassOne(int size) {
+  private void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
   if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
@@ -156,10 +147,8 @@ final public class FilterList extends FilterBase {
 }
   }
 
-
   /**
* Get the operator.
-   *
* @return operator
*/
   public Operator getOperator() {
@@ -168,7 +157,6 @@ final public class FilterList extends FilterBase {
 
   /**
* Get the filters.
-   *
* @return filters
*/
   public List getFilters() {
@@ -183,33 +171,22 @@ final public class FilterList extends FilterBase {
 return filters.isEmpty();
   }
 
-  private static boolean getReversed(List rowFilters, boolean 
defaultV
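
To make the refactoring above concrete, a small hedged sketch of how the public constructors are
used; after this change they all funnel into FilterList(Operator, List). The filters chosen are
arbitrary examples, not taken from the patch.

  import java.util.Arrays;
  import org.apache.hadoop.hbase.filter.Filter;
  import org.apache.hadoop.hbase.filter.FilterList;
  import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
  import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

  public class FilterListCtorSketch {
    public static void main(String[] args) {
      // Operator defaults to MUST_PASS_ALL.
      FilterList all = new FilterList(new KeyOnlyFilter(), new FirstKeyOnlyFilter());

      // Explicit operator, filters added afterwards.
      FilterList one = new FilterList(FilterList.Operator.MUST_PASS_ONE);
      one.addFilter(new KeyOnlyFilter());

      // Explicit operator plus a list: the constructor the others now delegate to.
      FilterList both = new FilterList(FilterList.Operator.MUST_PASS_ONE,
          Arrays.<Filter>asList(new KeyOnlyFilter(), new FirstKeyOnlyFilter()));
      System.out.println(all.getOperator() + " " + one.getOperator() + " " + both.getOperator());
    }
  }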

[15/21] hbase git commit: HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature branch.

2017-10-25 Thread zhangduo
HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature 
branch.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/086fadfc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/086fadfc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/086fadfc

Branch: refs/heads/HBASE-18410
Commit: 086fadfc85a19cf579060b5843e0e892a9ab609b
Parents: 93bac3d
Author: Sean Busbey 
Authored: Mon Oct 9 15:24:00 2017 -0500
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java   | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/086fadfc/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
index dd2399f..590b26e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -58,6 +58,7 @@ public class TestFilterListOnMini {
 TEST_UTIL.shutdownMiniCluster();
   }
 
+  @Ignore("HBASE-18410 Should not merge without this test running.")
   @Test
   public void testFiltersWithOR() throws Exception {
 TableName tn = TableName.valueOf(name.getMethodName());



[02/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/3969b853/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
deleted file mode 100644
index 234bd7a..000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/ssl/KeyStoreTestUtil.java
+++ /dev/null
@@ -1,342 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http.ssl;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Writer;
-import java.math.BigInteger;
-import java.net.URL;
-import java.security.GeneralSecurityException;
-import java.security.InvalidKeyException;
-import java.security.Key;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.KeyStore;
-import java.security.NoSuchAlgorithmException;
-import java.security.NoSuchProviderException;
-import java.security.SecureRandom;
-import java.security.SignatureException;
-import java.security.cert.Certificate;
-import java.security.cert.CertificateEncodingException;
-import java.security.cert.X509Certificate;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.security.auth.x500.X500Principal;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.bouncycastle.x509.X509V1CertificateGenerator;
-
-public class KeyStoreTestUtil {
-
-  public static String getClasspathDir(Class klass) throws Exception {
-String file = klass.getName();
-file = file.replace('.', '/') + ".class";
-URL url = Thread.currentThread().getContextClassLoader().getResource(file);
-String baseDir = url.toURI().getPath();
-baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
-return baseDir;
-  }
-
-  /**
-   * Create a self-signed X.509 Certificate.
-   *
-   * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
-   * @param pair the KeyPair
-   * @param days how many days from now the Certificate is valid for
-   * @param algorithm the signing algorithm, eg "SHA1withRSA"
-   * @return the self-signed certificate
-   */
-  public static X509Certificate generateCertificate(String dn, KeyPair pair, 
int days, String algorithm) 
-  throws CertificateEncodingException, InvalidKeyException, 
IllegalStateException, 
-  NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
-Date from = new Date();
-Date to = new Date(from.getTime() + days * 8640l);
-BigInteger sn = new BigInteger(64, new SecureRandom());
-KeyPair keyPair = pair;
-X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
-X500Principal  dnName = new X500Principal(dn);
-
-certGen.setSerialNumber(sn);
-certGen.setIssuerDN(dnName);
-certGen.setNotBefore(from);
-certGen.setNotAfter(to);
-certGen.setSubjectDN(dnName);
-certGen.setPublicKey(keyPair.getPublic());
-certGen.setSignatureAlgorithm(algorithm);
-X509Certificate cert = certGen.generate(pair.getPrivate());
-return cert;
-  }
-
-  public static KeyPair generateKeyPair(String algorithm)
-throws NoSuchAlgorithmException {
-KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
-keyGen.initialize(1024);
-return keyGen.genKeyPair();
-  }
-
-  private static KeyStore createEmptyKeyStore()
-throws GeneralSecurityException, IOException {
-KeyStore ks = KeyStore.getInstance("JKS");
-ks.load(null, null); // initialize
-return ks;
-  }
-
-  private static void saveKeyStore(KeyStore ks, String filename,
-   String password)
-throws GeneralSecurityException, IOException {
-FileOutputStream out = new FileOutputStream(filename);
-try {
-  ks.store(out, password.toCharArray());
-} finally {
-  out.close();
-}
-  }
-
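
The class removed here is being moved to the new hbase-http module rather than dropped. As a
reminder of the pattern it wraps, a tiny standalone sketch of creating and saving an empty JKS
keystore with plain JDK APIs; the file name and password are placeholders.

  import java.io.FileOutputStream;
  import java.security.KeyStore;

  public class EmptyKeyStoreSketch {
    public static void main(String[] args) throws Exception {
      KeyStore ks = KeyStore.getInstance("JKS");
      ks.load(null, null);                        // initialize an empty store
      try (FileOutputStream out = new FileOutputStream("test.jks")) {
        ks.store(out, "changeit".toCharArray());  // placeholder password
      }
    }
  }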

[04/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/3969b853/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
deleted file mode 100644
index e23eecd..000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.log;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.regex.Pattern;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Jdk14Logger;
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.util.ServletUtil;
-
-/**
- * Change log level in runtime.
- */
-@InterfaceStability.Evolving
-public class LogLevel {
-  public static final String USAGES = "\nUsage: General options are:\n"
-  + "\t[-getlevel <host:port> <name>]\n"
-  + "\t[-setlevel <host:port> <name> <level>]\n";
-
-  /**
-   * A command line implementation
-   */
-  public static void main(String[] args) {
-if (args.length == 3 && "-getlevel".equals(args[0])) {
-  process("http://" + args[1] + "/logLevel?log=" + args[2]);
-  return;
-}
-else if (args.length == 4 && "-setlevel".equals(args[0])) {
-  process("http://" + args[1] + "/logLevel?log=" + args[2]
-  + "&level=" + args[3]);
-  return;
-}
-
-System.err.println(USAGES);
-System.exit(-1);
-  }
-
-  private static void process(String urlstring) {
-try {
-  URL url = new URL(urlstring);
-  System.out.println("Connecting to " + url);
-  URLConnection connection = url.openConnection();
-  connection.connect();
-  try (InputStreamReader streamReader = new 
InputStreamReader(connection.getInputStream());
-   BufferedReader bufferedReader = new BufferedReader(streamReader)) {
-for(String line; (line = bufferedReader.readLine()) != null; ) {
-  if (line.startsWith(MARKER)) {
-System.out.println(TAG.matcher(line).replaceAll(""));
-  }
-}
-  }
-} catch (IOException ioe) {
-  System.err.println("" + ioe);
-}
-  }
-
-  static final String MARKER = "<!-- OUTPUT -->";
-  static final Pattern TAG = Pattern.compile("<[^>]*>");
-
-  /**
-   * A servlet implementation
-   */
-  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-  @InterfaceStability.Unstable
-  public static class Servlet extends HttpServlet {
-private static final long serialVersionUID = 1L;
-
-@Override
-public void doGet(HttpServletRequest request, HttpServletResponse response
-) throws ServletException, IOException {
-
-  // Do the authorization
-  if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-  response)) {
-return;
-  }
-
-  PrintWriter out = ServletUtil.initHTML(response, "Log Level");
-  String logName = ServletUtil.getParameter(request, "log");
-  String level = ServletUtil.getParameter(request, "level");
-
-  if (logName != null) {
-out.println("Results");
-out.println(MARKER
-+ "Submitted Log Name: " + logName + "");
-
-Log log = LogFactory.getLog(logName);
-out.println(MARKER
-+ "Log Class: " + log.getClass().getName() +"");
-if (level != null) {
-  out.println(MARKER + "Submitted Level: " + level + "");
-}
-
-if (log instanceof Log4JLogger) {
-  process(((Log4JLogger)log).getLogger(), level, out);
-}
- 
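
Likewise moved rather than removed. For reference, a hedged standalone sketch of what the
-getlevel path does: a plain HTTP GET against the /logLevel servlet, mirroring the process()
method above. Host, port and logger name are placeholders.

  import java.io.BufferedReader;
  import java.io.InputStreamReader;
  import java.net.URL;
  import java.net.URLConnection;

  public class LogLevelGetSketch {
    public static void main(String[] args) throws Exception {
      // Roughly: LogLevel -getlevel localhost:16010 org.apache.hadoop.hbase
      URL url = new URL("http://localhost:16010/logLevel?log=org.apache.hadoop.hbase");
      URLConnection connection = url.openConnection();
      connection.connect();
      try (BufferedReader reader =
          new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
        for (String line; (line = reader.readLine()) != null; ) {
          System.out.println(line);
        }
      }
    }
  }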

[11/21] hbase git commit: HBASE-18754 Get rid of Writable from TimeRangeTracker

2017-10-25 Thread zhangduo
HBASE-18754 Get rid of Writable from TimeRangeTracker


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/93bac3de
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/93bac3de
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/93bac3de

Branch: refs/heads/HBASE-18410
Commit: 93bac3de0a5ccd3977fb9d3760efa19481955d80
Parents: 7cdfbde
Author: Chia-Ping Tsai 
Authored: Sun Oct 22 17:17:05 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Oct 24 14:54:34 2017 +0800

--
 .../hbase/mapreduce/TestHFileOutputFormat2.java |  4 +-
 .../src/main/protobuf/HBase.proto   |  5 ++
 .../hbase/io/hfile/HFilePrettyPrinter.java  |  2 +-
 .../hadoop/hbase/regionserver/HStoreFile.java   |  3 +-
 .../hbase/regionserver/StoreFileWriter.java |  5 +-
 .../hbase/regionserver/TimeRangeTracker.java| 58 ++--
 .../regionserver/compactions/Compactor.java |  3 +-
 .../TestSimpleTimeRangeTracker.java | 22 ++--
 8 files changed, 59 insertions(+), 43 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index f2a3527..372737a 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -94,7 +94,6 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -399,8 +398,7 @@ public class TestHFileOutputFormat2  {
   assertNotNull(range);
 
   // unmarshall and check values.
-  TimeRangeTracker timeRangeTracker = 
TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
-  Writables.copyWritable(range, timeRangeTracker);
+  TimeRangeTracker timeRangeTracker =TimeRangeTracker.parseFrom(range);
   LOG.info(timeRangeTracker.getMin() +
   "" + timeRangeTracker.getMax());
   assertEquals(1000, timeRangeTracker.getMin());

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-protocol-shaded/src/main/protobuf/HBase.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/HBase.proto 
b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
index 9de897a..cc1ae8f 100644
--- a/hbase-protocol-shaded/src/main/protobuf/HBase.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
@@ -118,6 +118,11 @@ message TimeRange {
   optional uint64 to = 2;
 }
 
+message TimeRangeTracker {
+  optional uint64 from = 1;
+  optional uint64 to = 2;
+}
+
 /* ColumnFamily Specific TimeRange */
 message ColumnFamilyTimeRange {
   required bytes column_family = 1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index acb25fc..a800ef1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -539,7 +539,7 @@ public class HFilePrettyPrinter extends Configured 
implements Tool {
   || Bytes.equals(e.getKey(), HStoreFile.BULKLOAD_TIME_KEY)) {
 out.println(Bytes.toLong(e.getValue()));
   } else if (Bytes.equals(e.getKey(), HStoreFile.TIMERANGE_KEY)) {
-TimeRangeTracker timeRangeTracker = 
TimeRangeTracker.getTimeRangeTracker(e.getValue());
+TimeRangeTracker timeRangeTracker = 
TimeRangeTracker.parseFrom(e.getValue());
 out.println(timeRangeTracker.getMin() + "" + 
timeRangeTracker.getMax());
   } else if (Bytes.equals(e.getKey(), FileInfo.AVG_KEY_LEN)
   || Bytes.equals(e.getKey(), FileInfo.AVG_VALUE_LEN)

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
--
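
For reference, a hedged sketch of the new parse path used in the HFilePrettyPrinter hunk above.
The byte[] argument stands in for the raw TIMERANGE_KEY entry read from a store file's metadata;
how it is obtained is outside this diff.

  import java.io.IOException;
  import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;

  public class TimeRangeTrackerSketch {
    // timeRangeBytes: the serialized TimeRangeTracker stored under the TIMERANGE_KEY file info.
    static void printRange(byte[] timeRangeBytes) throws IOException {
      TimeRangeTracker tracker = TimeRangeTracker.parseFrom(timeRangeBytes);
      System.out.println(tracker.getMin() + ".." + tracker.getMax());
    }
  }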

[20/21] hbase git commit: HBASE-19057 Fix other code review comments about FilterList improvement

2017-10-25 Thread zhangduo
HBASE-19057 Fix other code review comments about FilterList improvement


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d370fac7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d370fac7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d370fac7

Branch: refs/heads/HBASE-18410
Commit: d370fac77ec13ea317ac72311463cf06c8dbce03
Parents: 6bf4ea7
Author: huzheng 
Authored: Tue Oct 24 15:30:55 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 24 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 16 --
 .../hadoop/hbase/filter/FilterListWithAND.java  | 12 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 31 +++-
 .../hadoop/hbase/filter/TestFilterList.java |  8 +++--
 5 files changed, 59 insertions(+), 32 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d370fac7/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index e87f1b3..d4242ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,17 +21,12 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.yetus.audience.InterfaceAudience;
 
 import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -170,8 +165,23 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
-  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
-return this.filterListBase.internalFilterKeyValue(c, 
currentTransformedCell);
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}. Compared to the
+   * {@link #filterKeyValue(Cell)} method, this method accepts an additional parameter named
+   * transformedCell, which is the value of the transformed cell before this filter operation.
+   * For FilterList, we can consider a filter list as a node in a tree, and its sub-filters as the
+   * children of that node. Transforming the cell of a filter list is then a post-order tree
+   * traversal: before we visit the current child, we set the traversal result (transformed cell)
+   * of the previous node(s) as the initial value, which is why the additional transformedCell
+   * parameter is needed (HBASE-18879).
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOException
+   */
+  ReturnCode internalFilterKeyValue(Cell c, Cell transformedCell) throws 
IOException {
+return this.filterListBase.internalFilterKeyValue(c, transformedCell);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/d370fac7/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 60b0dc1..f92d2e7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,8 +107,20 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
-  abstract ReturnCode internalFilterKeyValue(Cell c, Cell 
currentTransformedCell)
-  throws IOException;
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOException
+   * @see 
org.apache.hadoop.hbase.filter.FilterList#internalFilterKe
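
To illustrate the "filter list as a tree node" description above, a hedged sketch of a nested
FilterList; the concrete filters are arbitrary examples, not taken from this patch.

  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.filter.FilterList;
  import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
  import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
  import org.apache.hadoop.hbase.filter.PrefixFilter;
  import org.apache.hadoop.hbase.util.Bytes;

  public class NestedFilterListSketch {
    public static Scan buildScan() {
      // The inner list is a child node; its transformed cell feeds the outer node (HBASE-18879).
      FilterList inner = new FilterList(FilterList.Operator.MUST_PASS_ONE,
          new KeyOnlyFilter(), new FirstKeyOnlyFilter());
      FilterList outer = new FilterList(FilterList.Operator.MUST_PASS_ALL,
          inner, new PrefixFilter(Bytes.toBytes("row-")));
      return new Scan().setFilter(outer);
    }
  }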

[14/21] hbase git commit: HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()

2017-10-25 Thread zhangduo
HBASE-18904 Missing break in NEXT_ROW case of 
FilterList#mergeReturnCodeForOrOperator()

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e75c78ca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e75c78ca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e75c78ca

Branch: refs/heads/HBASE-18410
Commit: e75c78ca7ac58891e2806075ef674dcc214fcefa
Parents: 9be9604
Author: Biju Nair 
Authored: Fri Sep 29 16:55:54 2017 -0400
Committer: zhangduo 
Committed: Wed Oct 25 20:27:16 2017 +0800

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e75c78ca/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3147ab0..b518645 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,6 +566,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
 return ReturnCode.NEXT_ROW;
   }
+  break;
 case SEEK_NEXT_USING_HINT:
   if (isInReturnCodes(rc, ReturnCode.INCLUDE, 
ReturnCode.INCLUDE_AND_NEXT_COL,
 ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -577,6 +578,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
 return ReturnCode.SEEK_NEXT_USING_HINT;
   }
+  break;
 }
 throw new IllegalStateException(
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);



[06/21] hbase git commit: HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/3969b853/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
--
diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
new file mode 100644
index 000..4fad031
--- /dev/null
+++ 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
@@ -0,0 +1,258 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.security.Principal;
+import java.security.PrivilegedExceptionAction;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosTicket;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
+import org.apache.hadoop.hbase.http.resource.JerseyResource;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthSchemeProvider;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.KerberosCredentials;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.config.AuthSchemes;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Lookup;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.entity.ContentType;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.util.EntityUtils;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
+import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+import org.ietf.jgss.Oid;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test class for SPNEGO authentication on the HttpServer. Uses Kerby's 
MiniKDC and Apache
+ * HttpComponents to verify that a simple Servlet is reachable via SPNEGO and 
unreachable w/o.
+ */
+@Category({MiscTests.class, SmallTests.class})
+public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
+  private static final Log LOG = LogFactory.getLog(TestSpnegoHttpServer.class);
+  private static final String KDC_SERVER_HOST = "localhost";
+  private static final String CLIENT_PRINCIPAL = "client";
+
+  private static HttpServer server;
+  private static URL baseUrl;
+  private static SimpleKdcServer kdc;
+  private static File infoServerKeytab;
+  private static File clientKeytab;
+
+  @BeforeClass
+  public static void setupServer() throws Exception {
+final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
+final File target = new File(System.getProperty("user.dir"), "target");
+assertTrue(target.exists());
+
+kdc = buildMiniKdc();
+kdc.start();
+
+File keytabDir = new File(target, 
TestSpnegoHttpServer.class.getSimpleName()
++ "_keytabs");
+if (keytabDir.exists()) {
+  deleteRecursively(keytabDir);
+}
+keytabDir.mkdirs();
+
+infoServerKeytab = new File(keytabDir, serverPrincipal.replace('/', '_') + 
".keytab");
+clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");
+
+setupUser(kdc, clientKeytab, CLIENT_PRINCIPAL);
+setupUser(kdc, infoServerKeytab, serverPrincipal);
+
+Configuration conf = buildS

[01/23] hbase git commit: HBASE-19066 Correct the directory of openjdk-8 for jenkins [Forced Update!]

2017-10-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18410 d370fac77 -> 705b3fa98 (forced update)


HBASE-19066 Correct the directory of openjdk-8 for jenkins

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a31e8e34
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a31e8e34
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a31e8e34

Branch: refs/heads/HBASE-18410
Commit: a31e8e34ea2d6de9b40c251bfda8511dfc80f7f7
Parents: 93bac3d
Author: Chia-Ping Tsai 
Authored: Mon Oct 23 03:49:07 2017 +0800
Committer: Sean Busbey 
Committed: Tue Oct 24 09:18:47 2017 -0500

--
 dev-support/Jenkinsfile | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a31e8e34/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 1f01a47..28efa61 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -121,7 +121,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 TESTS = 'all,-unit,-findbugs'
 // on branches that don't support jdk7, this will already be 
JAVA_HOME, so we'll end up not
 // doing multijdk there.
-MULTIJDK = '/usr/lib/jvm/java-8-oracle'
+MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
 OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}"
 OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}"
   }
@@ -211,7 +211,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 OUTPUTDIR = "${env.OUTPUTDIR_JDK8}"
 // This isn't strictly needed on branches that only support jdk8, but 
doesn't hurt
 // and is needed on branches that do both jdk7 and jdk8
-SET_JAVA_HOME = '/usr/lib/jvm/java-8-oracle'
+SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
   }
   steps {
 unstash 'yetus'



[23/23] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45910981
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45910981
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45910981

Branch: refs/heads/HBASE-18410
Commit: 45910981b78ea2d238b9bcd2caef1270ca0261b4
Parents: 4e1c720
Author: huzheng 
Authored: Sat May 27 16:58:00 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  74 +-
 .../hadoop/hbase/filter/TestFilterList.java | 136 +--
 2 files changed, 200 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/45910981/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 2f11472..3493082 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -67,6 +67,14 @@ final public class FilterList extends FilterBase {
   private final List filters;
   private Collection seekHintFilters = new ArrayList();
 
+  /**
+   * Save the previous return code and previous cell for every filter in the filter list. For
+   * MUST_PASS_ONE, we use the previous return code to decide whether the current cell should be
+   * passed to the corresponding filter. For MUST_PASS_ALL, the two lists are meaningless.
+   */
+  private List prevFilterRCList = null;
+  private List prevCellList = null;
+
   /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
   private Cell referenceCell = null;
 
@@ -88,6 +96,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final List rowFilters) {
 reversed = getReversed(rowFilters, reversed);
 this.filters = new ArrayList<>(rowFilters);
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -107,6 +116,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
 this.operator = operator;
 this.filters = new ArrayList<>();
+initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -118,6 +128,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final List rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -129,8 +140,21 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.length);
+  }
+
+  public void initPrevListForMustPassOne(int size) {
+if (operator == Operator.MUST_PASS_ONE) {
+  if (this.prevCellList == null) {
+prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+  if (this.prevCellList == null) {
+prevCellList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+}
   }
 
+
   /**
* Get the operator.
*
@@ -185,6 +209,10 @@ final public class FilterList extends FilterBase {
   public void addFilter(List filters) {
 checkReversed(filters, isReversed());
 this.filters.addAll(filters);
+if (operator == Operator.MUST_PASS_ONE) {
+  this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
+  this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
+}
   }
 
   /**
@@ -201,6 +229,10 @@ final public class FilterList extends FilterBase {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   filters.get(i).reset();
+  if (operator == Operator.MUST_PASS_ONE) {
+prevFilterRCList.set(i, null);
+prevCellList.set(i, null);
+  }
 }
 seekHintFilters.clear();
   }
@@ -283,6 +315,41 @@ final public class FilterList extends FilterBase {
 return this.transformedCell;
   }
 
+  /**
+   * For MUST_PASS_ONE, we cannot make sure that when filter-A in filter list 
return NEXT_COL then
+   * the next cell passing to filterList will be the first cell in next 
column, because if filter-B
+   * in filter list return SKIP, then the filter list will return SKIP. In 
this case, we should pass
+   * the cell following the previous cell, and it's possible that the next 
cell has the same column
+   * as the previous cell even if filter-A has NEXT_COL returned for the 
previous cell. So we should
+   * sa
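
A hedged, test-style sketch of the situation described in that comment, using stub filters to
force the NEXT_COL/SKIP combination; this is illustration only and not code from the patch.

  import java.io.IOException;
  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.KeyValue;
  import org.apache.hadoop.hbase.filter.Filter;
  import org.apache.hadoop.hbase.filter.FilterBase;
  import org.apache.hadoop.hbase.filter.FilterList;
  import org.apache.hadoop.hbase.util.Bytes;

  public class MustPassOneMergeSketch {
    public static void main(String[] args) throws IOException {
      Filter nextCol = new FilterBase() {   // filter-A: done with this column
        @Override public ReturnCode filterKeyValue(Cell c) { return ReturnCode.NEXT_COL; }
      };
      Filter skip = new FilterBase() {      // filter-B: just skip this cell
        @Override public ReturnCode filterKeyValue(Cell c) { return ReturnCode.SKIP; }
      };
      FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ONE, nextCol, skip);
      Cell kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
          Bytes.toBytes("q"), Bytes.toBytes("v"));
      // NEXT_COL merged with SKIP yields SKIP, so the next cell handed to the list may still be
      // in the same column; prevFilterRCList/prevCellList let the list shield filter-A from it.
      System.out.println(list.filterKeyValue(kv));
    }
  }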

[02/23] hbase git commit: HBASE-19049 Update Kerby to 1.0.1.

2017-10-25 Thread zhangduo
HBASE-19049 Update Kerby to 1.0.1.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1c1906e0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1c1906e0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1c1906e0

Branch: refs/heads/HBASE-18410
Commit: 1c1906e03acefab82df4d7c08e1684fe08409406
Parents: a31e8e3
Author: Sean Busbey 
Authored: Thu Oct 19 09:05:50 2017 -0500
Committer: Sean Busbey 
Committed: Tue Oct 24 09:43:20 2017 -0500

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1c1906e0/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 60599c6..6e3bfb3 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1440,7 +1440,7 @@
 1.0.18
 2.12.2
 1.46
-1.0.0-RC2
+1.0.1
 1.0.0
 4.0.0
 



[12/23] hbase git commit: HBASE-19077 Have Region*CoprocessorEnvironment provide an ImmutableOnlineRegions

2017-10-25 Thread zhangduo
HBASE-19077 Have Region*CoprocessorEnvironment provide an ImmutableOnlineRegions

Change name of Interface OnlineRegions to MutableOnlineRegions.
Change name of Interface ImmutableOnlineRegions to OnlineRegions.
Did this since OnlineRegions is for consumers other than internals.

Add a getOnlineRegions to the RegionCoprocessorEnvironment and to
RegionServerCoprocessorEnvironment so CPs can 'access' local
Regions directly.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/95528d12
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/95528d12
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/95528d12

Branch: refs/heads/HBASE-18410
Commit: 95528d12f33eeccb1bd1e80b3441c4f14153dcd2
Parents: 2e64813
Author: Michael Stack 
Authored: Tue Oct 24 15:37:32 2017 -0700
Committer: Michael Stack 
Committed: Tue Oct 24 21:58:14 2017 -0700

--
 .../RegionCoprocessorEnvironment.java   |  6 ++
 .../RegionServerCoprocessorEnvironment.java |  6 ++
 .../regionserver/ImmutableOnlineRegions.java| 60 
 .../regionserver/MutableOnlineRegions.java  | 44 ++
 .../hbase/regionserver/OnlineRegions.java   | 47 +--
 .../regionserver/RegionCoprocessorHost.java |  6 ++
 .../RegionServerCoprocessorHost.java|  7 +++
 .../regionserver/RegionServerServices.java  |  2 +-
 .../regionserver/handler/OpenRegionHandler.java |  2 +-
 .../security/token/TestTokenAuthentication.java |  6 ++
 10 files changed, 108 insertions(+), 78 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/95528d12/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
index 8058a9d..05df33d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.metrics.MetricRegistry;
+import org.apache.hadoop.hbase.regionserver.OnlineRegions;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
@@ -40,6 +41,11 @@ public interface RegionCoprocessorEnvironment extends 
CoprocessorEnvironment getSharedData();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/95528d12/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
index d0a728e..116d86d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.metrics.MetricRegistry;
+import org.apache.hadoop.hbase.regionserver.OnlineRegions;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 
@@ -36,6 +37,11 @@ public interface RegionServerCoprocessorEnvironment
   ServerName getServerName();
 
   /**
+   * @return Interface to Map of regions online on this RegionServer {@link 
#getServerName()}}.
+   */
+  OnlineRegions getOnlineRegions();
+
+  /**
* Be careful RPC'ing from a Coprocessor context.
* RPC's will fail, stall, retry, and/or crawl because the remote side is 
not online, is
* struggling or it is on the other side of a network partition. Any use of 
Connection from

http://git-wip-us.apache.org/repos/asf/hbase/blob/95528d12/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.java
deleted f
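
A minimal hedged sketch of a coprocessor-side caller using the accessor this commit adds; it
stops at obtaining the OnlineRegions view, since what a CP then does with it is outside this diff.

  import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
  import org.apache.hadoop.hbase.regionserver.OnlineRegions;

  final class OnlineRegionsPeek {
    // env is handed to a region server coprocessor by the host; getOnlineRegions() is the new
    // accessor. Anything beyond obtaining the interface is an assumption and is not shown.
    static OnlineRegions localRegions(RegionServerCoprocessorEnvironment env) {
      return env.getOnlineRegions();
    }
  }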

[20/23] hbase git commit: HBASE-19057 Fix other code review comments about FilterList improvement

2017-10-25 Thread zhangduo
HBASE-19057 Fix other code review comments about FilterList improvement


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/705b3fa9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/705b3fa9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/705b3fa9

Branch: refs/heads/HBASE-18410
Commit: 705b3fa98c97806c7eba63617a99f62d829400d1
Parents: fcaf71d
Author: huzheng 
Authored: Tue Oct 24 15:30:55 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 24 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 16 --
 .../hadoop/hbase/filter/FilterListWithAND.java  | 12 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 31 +++-
 .../hadoop/hbase/filter/TestFilterList.java |  8 +++--
 5 files changed, 59 insertions(+), 32 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/705b3fa9/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index e87f1b3..d4242ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,17 +21,12 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.yetus.audience.InterfaceAudience;
 
 import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -170,8 +165,23 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
-  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
-return this.filterListBase.internalFilterKeyValue(c, 
currentTransformedCell);
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}. Compared to the
+   * {@link #filterKeyValue(Cell)} method, this method accepts an additional parameter named
+   * transformedCell, which is the value of the transformed cell before this filter operation.
+   * For FilterList, we can consider a filter list as a node in a tree, and its sub-filters as the
+   * children of that node. Transforming the cell of a filter list is then a post-order tree
+   * traversal: before we visit the current child, we set the traversal result (transformed cell)
+   * of the previous node(s) as the initial value, which is why the additional transformedCell
+   * parameter is needed (HBASE-18879).
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOException
+   */
+  ReturnCode internalFilterKeyValue(Cell c, Cell transformedCell) throws 
IOException {
+return this.filterListBase.internalFilterKeyValue(c, transformedCell);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/705b3fa9/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 60b0dc1..f92d2e7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,8 +107,20 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
-  abstract ReturnCode internalFilterKeyValue(Cell c, Cell 
currentTransformedCell)
-  throws IOException;
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOException
+   * @see 
org.apache.hadoop.hbase.filter.FilterList#internalFilterKe

[15/23] hbase git commit: HBASE-18879 HBase FilterList cause KeyOnlyFilter not work

2017-10-25 Thread zhangduo
HBASE-18879 HBase FilterList cause KeyOnlyFilter not work


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8a5114b4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8a5114b4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8a5114b4

Branch: refs/heads/HBASE-18410
Commit: 8a5114b4fa079ff165ba735691eb4d99e3b5d7ca
Parents: 5927510
Author: huzheng 
Authored: Wed Oct 11 21:17:03 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  6 +++
 .../hadoop/hbase/filter/FilterListBase.java |  3 ++
 .../hadoop/hbase/filter/FilterListWithAND.java  | 22 +
 .../hadoop/hbase/filter/FilterListWithOR.java   | 22 +
 .../hadoop/hbase/filter/TestFilterList.java | 48 
 5 files changed, 85 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8a5114b4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 97392d1..e87f1b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -72,6 +72,8 @@ final public class FilterList extends FilterBase {
   filterListBase = new FilterListWithAND(filters);
 } else if (operator == Operator.MUST_PASS_ONE) {
   filterListBase = new FilterListWithOR(filters);
+} else {
+  throw new IllegalArgumentException("Invalid operator: " + operator);
 }
 this.operator = operator;
   }
@@ -168,6 +170,10 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
+return this.filterListBase.internalFilterKeyValue(c, 
currentTransformedCell);
+  }
+
   @Override
   public ReturnCode filterKeyValue(Cell c) throws IOException {
 return filterListBase.filterKeyValue(c);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8a5114b4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 7fa0245..60b0dc1 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,6 +107,9 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
+  abstract ReturnCode internalFilterKeyValue(Cell c, Cell 
currentTransformedCell)
+  throws IOException;
+
   /**
* Filters that never filter by modifying the returned List of Cells can 
inherit this
* implementation that does nothing. {@inheritDoc}

http://git-wip-us.apache.org/repos/asf/hbase/blob/8a5114b4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
index fa979c0..4909dfd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
@@ -147,16 +147,26 @@ public class FilterListWithAND extends FilterListBase {
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);
   }
 
-  private ReturnCode filterKeyValueWithMustPassAll(Cell c) throws IOException {
+  @Override
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
+if (isEmpty()) {
+  return ReturnCode.INCLUDE;
+}
 ReturnCode rc = ReturnCode.INCLUDE;
-Cell transformed = c;
+Cell transformed = currentTransformedCell;
+this.referenceCell = c;
 this.seekHintFilter.clear();
 for (int i = 0, n = filters.size(); i < n; i++) {
   Filter filter = filters.get(i);
   if (filter.filterAllRemaining()) {
 return ReturnCode.NEXT_ROW;
   }
-  ReturnCode localRC = filter.filterKeyValue(c);
+  ReturnCode localRC;
+  if (filter instanceof FilterList) {
+localRC = ((FilterList) filter).internalFilterKeyValue(c, transformed);
+  } else {
+localRC = filter.filterKeyValue(c

[17/23] hbase git commit: HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature branch.

2017-10-25 Thread zhangduo
HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature 
branch.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4e1c7208
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4e1c7208
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4e1c7208

Branch: refs/heads/HBASE-18410
Commit: 4e1c7208cbbe27a926a192a9e9526c0d02d0c516
Parents: 43a8ac0
Author: Sean Busbey 
Authored: Mon Oct 9 15:24:00 2017 -0500
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java   | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4e1c7208/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
index dd2399f..590b26e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -58,6 +58,7 @@ public class TestFilterListOnMini {
 TEST_UTIL.shutdownMiniCluster();
   }
 
+  @Ignore("HBASE-18410 Should not merge without this test running.")
   @Test
   public void testFiltersWithOR() throws Exception {
 TableName tn = TableName.valueOf(name.getMethodName());



[18/23] hbase git commit: HBASE-18411 Dividing FilterList into two separate sub-classes: FilterListWithOR, FilterListWithAND

2017-10-25 Thread zhangduo
HBASE-18411 Dividing FilterList into two separate sub-classes: FilterListWithOR, FilterListWithAND

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59275100
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59275100
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59275100

Branch: refs/heads/HBASE-18410
Commit: 59275100e1d7889e73169ab0d0a70c710e260b7a
Parents: 6442d30
Author: huzheng 
Authored: Tue Oct 10 20:01:48 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 661 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 159 +
 .../hadoop/hbase/filter/FilterListWithAND.java  | 273 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 383 +++
 .../hadoop/hbase/filter/TestFilterList.java |  89 +++
 5 files changed, 962 insertions(+), 603 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/59275100/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index b518645..97392d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -37,86 +38,60 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
 
 /**
- * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator {@link 
Operator#MUST_PASS_ALL}
- * (AND) or {@link Operator#MUST_PASS_ONE} (OR).
- * Since you can use Filter Lists as children of Filter Lists, you can create a
- * hierarchy of filters to be evaluated.
- *
- * 
- * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does
- * not include the KeyValue.
- *
- * 
- * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated.
- *
- * 
+ * Implementation of {@link Filter} that represents an ordered List of Filters 
which will be
+ * evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} 
(AND) or
+ * {@link Operator#MUST_PASS_ONE} (OR). Since you can use Filter 
Lists as children of
+ * Filter Lists, you can create a hierarchy of filters to be evaluated. 
+ * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does not
+ * include the KeyValue. 
+ * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated. 
  * Defaults to {@link Operator#MUST_PASS_ALL}.
  */
 @InterfaceAudience.Public
 final public class FilterList extends FilterBase {
+
   /** set operator */
   @InterfaceAudience.Public
-  public static enum Operator {
+  public enum Operator {
 /** !AND */
 MUST_PASS_ALL,
 /** !OR */
 MUST_PASS_ONE
   }
 
-  private static final int MAX_LOG_FILTERS = 5;
-  private Operator operator = Operator.MUST_PASS_ALL;
-  private final List filters;
-  private Collection seekHintFilters = new ArrayList();
-
-  /**
-   * Save previous return code and previous cell for every filter in filter 
list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current 
cell encountered to
-   * the filter. For MUST_PASS_ALL, the two list are meaningless.
-   */
-  private List prevFilterRCList = null;
-  private List prevCellList = null;
-
-  /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
-  private Cell referenceCell = null;
-
-  /**
-   * When filtering a given Cell in {@link #filterKeyValue(Cell)},
-   * this stores the transformed Cell to be returned by {@link 
#transformCell(Cell)}.
-   *
-   * Individual filters transformation are applied only when the filter 
includes the Cell.
-   * Transformations are composed in the order specified by {@link #filters}.
-   */
-  private Cell transformedCell = null;
+  private Operator operator;
+  private FilterListBase filterListBase;
 
   /**
* Constructor that takes a set of {@link Filter}s and an operator.
* @param operator Operator to process filter set with.
-   * @p
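
A short usage sketch of the two operators described in the javadoc above. Filters come from org.apache.hadoop.hbase.filter; the CompareOperator-based constructors are assumed from the 2.0-era API and may be CompareOp-based on older branches.

    // MUST_PASS_ALL (AND): lazy, stops at the first sub-filter that rejects a cell.
    FilterList andList = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new PrefixFilter(Bytes.toBytes("row-2017")),
        new QualifierFilter(CompareOperator.EQUAL,
            new BinaryComparator(Bytes.toBytes("q1"))));

    // MUST_PASS_ONE (OR): non-lazy, every sub-filter is consulted for every cell.
    FilterList orList = new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new PrefixFilter(Bytes.toBytes("row-2017")),
        new QualifierFilter(CompareOperator.EQUAL,
            new BinaryComparator(Bytes.toBytes("q2"))));

Internally, the first now becomes a FilterListWithAND and the second a FilterListWithOR, but that split is invisible to callers.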

[04/23] hbase git commit: HBASE-19074 Miscellaneous Observer cleanups Breaks MemStoreSize into MemStoreSize (read-only) and MemStoreSizing (read/write). MemStoreSize we allow to Coprocessors. MemStoreS

2017-10-25 Thread zhangduo
HBASE-19074 Miscellaneous Observer cleanups
Breaks MemStoreSize into MemStoreSize (read-only) and MemStoreSizing (read/write).
MemStoreSize is exposed to Coprocessors; MemStoreSizing is used internally for
MemStore accounting.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cb506fd4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cb506fd4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cb506fd4

Branch: refs/heads/HBASE-18410
Commit: cb506fd4019556ec5535310b38e99b1ebfdf80bb
Parents: 9716f62
Author: Michael Stack 
Authored: Mon Oct 23 20:57:46 2017 -0700
Committer: Michael Stack 
Committed: Tue Oct 24 10:02:24 2017 -0700

--
 .../hbase/coprocessor/RegionObserver.java   |   6 ++
 .../hadoop/hbase/coprocessor/WALObserver.java   |   6 ++
 .../hbase/regionserver/AbstractMemStore.java|  28 ++---
 .../regionserver/CellArrayImmutableSegment.java |   5 +-
 .../regionserver/CellChunkImmutableSegment.java |   6 +-
 .../hbase/regionserver/CompactingMemStore.java  |  20 ++--
 .../hbase/regionserver/CompactionPipeline.java  |  22 ++--
 .../regionserver/CompositeImmutableSegment.java |   4 +-
 .../hbase/regionserver/DefaultMemStore.java |  16 +--
 .../hadoop/hbase/regionserver/HRegion.java  |  61 ++-
 .../hadoop/hbase/regionserver/HStore.java   |  18 ++--
 .../hadoop/hbase/regionserver/MemStore.java |  12 +--
 .../hadoop/hbase/regionserver/MemStoreSize.java |  68 
 .../hbase/regionserver/MemStoreSizing.java  | 103 +++
 .../MiniBatchOperationInProgress.java   |   3 +-
 .../hbase/regionserver/MutableSegment.java  |  16 ++-
 .../regionserver/RegionServerAccounting.java|   9 +-
 .../hadoop/hbase/regionserver/Segment.java  |  16 +--
 .../hbase/regionserver/SegmentFactory.java  |   7 +-
 .../TestCompactingToCellFlatMapMemStore.java|   8 +-
 .../hbase/regionserver/TestDefaultMemStore.java |   4 +-
 .../hadoop/hbase/regionserver/TestHStore.java   |  86 
 .../regionserver/wal/AbstractTestWALReplay.java |   8 +-
 23 files changed, 309 insertions(+), 223 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cb506fd4/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index 815daf1..5c89149 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -880,7 +880,10 @@ public interface RegionObserver {
* Called before a {@link WALEdit}
* replayed for this region.
* @param ctx the environment provided by the region server
+   * @deprecated Since hbase-2.0.0. No replacement. To be removed in 
hbase-3.0.0 and replaced
+   * with something that doesn't expose IntefaceAudience.Private classes.
*/
+  @Deprecated
   default void preWALRestore(ObserverContext ctx,
 RegionInfo info, WALKey logKey, WALEdit logEdit) throws IOException {}
 
@@ -888,7 +891,10 @@ public interface RegionObserver {
* Called after a {@link WALEdit}
* replayed for this region.
* @param ctx the environment provided by the region server
+   * @deprecated Since hbase-2.0.0. No replacement. To be removed in 
hbase-3.0.0 and replaced
+   * with something that doesn't expose IntefaceAudience.Private classes.
*/
+  @Deprecated
   default void postWALRestore(ObserverContext ctx,
 RegionInfo info, WALKey logKey, WALEdit logEdit) throws IOException {}
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cb506fd4/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java
index 2190abf..ec8518b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java
@@ -72,8 +72,11 @@ public interface WALObserver {
* is writen to WAL.
*
* @return true if default behavior should be bypassed, false otherwise
+   * @deprecated Since hbase-2.0.0. To be replaced with an alternative that 
does not expose
+   * InterfaceAudience classes such as WALKey and WALEdit. Will be removed in 
hbase-3.0.0.
*/
   // TODO: return value is not used
+  @Deprecated
   default boolean preWALWrite(ObserverContext ctx,
   RegionI
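
The read-only/read-write split described above is a common accounting pattern; a hedged, simplified sketch (field and method names are illustrative, not the exact HBase signatures):

    // Read-only view: safe to hand out to coprocessors, nothing can be mutated through it.
    class Size {
      protected long dataSize;
      protected long heapSize;
      long getDataSize() { return dataSize; }
      long getHeapSize() { return heapSize; }
    }

    // Read/write subclass: kept internal and used for the actual MemStore accounting.
    class Sizing extends Size {
      void inc(long dataDelta, long heapDelta) {
        this.dataSize += dataDelta;
        this.heapSize += heapDelta;
      }
    }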

[06/23] hbase git commit: HBASE-19065 HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish

2017-10-25 Thread zhangduo
HBASE-19065 HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() 
to finish


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3cced094
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3cced094
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3cced094

Branch: refs/heads/HBASE-18410
Commit: 3cced094c57d7ecaf4f5b22a01cda58294595ced
Parents: cda2949
Author: tedyu 
Authored: Tue Oct 24 11:53:21 2017 -0700
Committer: tedyu 
Committed: Tue Oct 24 11:53:21 2017 -0700

--
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3cced094/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index e03e4dd..5cbf889 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5729,6 +5729,10 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   seqId = ((FlushResultImpl)fs).flushSequenceId;
 } else if (fs.getResult() == 
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
   seqId = ((FlushResultImpl)fs).flushSequenceId;
+} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+  // CANNOT_FLUSH may mean that a flush is already on-going
+  // we need to wait for that flush to complete
+  waitForFlushes();
 } else {
   throw new IOException("Could not bulk load with an assigned 
sequential ID because the "+
 "flush didn't run. Reason for not flushing: " + 
((FlushResultImpl)fs).failureReason);
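
The CANNOT_FLUSH branch above waits out a flush some other thread already started before continuing with the bulk load. A hedged sketch of that kind of wait (the real HRegion.waitForFlushes() coordinates on its internal write state; the class, lock and flag below are illustrative):

    import java.io.InterruptedIOException;

    final class FlushGate {
      private final Object flushLock = new Object();
      private boolean flushInProgress;

      void waitForFlushes() throws InterruptedIOException {
        synchronized (flushLock) {
          while (flushInProgress) {       // loop to guard against spurious wakeups
            try {
              flushLock.wait();
            } catch (InterruptedException e) {
              Thread.currentThread().interrupt();
              throw new InterruptedIOException("Interrupted while waiting for flush");
            }
          }
        }
      }

      void flushFinished() {
        synchronized (flushLock) {
          flushInProgress = false;
          flushLock.notifyAll();          // wake any bulk-load threads parked above
        }
      }
    }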



[13/23] hbase git commit: HBASE-19074 Miscellaneous Observer cleanups; ADDENDUM to fix FindBugs

2017-10-25 Thread zhangduo
HBASE-19074 Miscellaneous Observer cleanups; ADDENDUM to fix FindBugs


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/43a8ac00
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/43a8ac00
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/43a8ac00

Branch: refs/heads/HBASE-18410
Commit: 43a8ac00158e92c3015af7753edd8e835dc6054b
Parents: 95528d1
Author: Michael Stack 
Authored: Tue Oct 24 22:40:30 2017 -0700
Committer: Michael Stack 
Committed: Tue Oct 24 22:41:04 2017 -0700

--
 .../java/org/apache/hadoop/hbase/regionserver/MemStoreSize.java| 2 +-
 .../java/org/apache/hadoop/hbase/regionserver/MemStoreSizing.java  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/43a8ac00/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSize.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSize.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSize.java
index cf2ef6f..557a61a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSize.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSize.java
@@ -62,7 +62,7 @@ public class MemStoreSize {
 
   @Override
   public boolean equals(Object obj) {
-if (obj == null || !(obj instanceof MemStoreSize)) {
+if (obj == null || getClass() != obj.getClass()) {
   return false;
 }
 MemStoreSize other = (MemStoreSize) obj;

http://git-wip-us.apache.org/repos/asf/hbase/blob/43a8ac00/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSizing.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSizing.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSizing.java
index fade622..b13201d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSizing.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreSizing.java
@@ -82,7 +82,7 @@ public class MemStoreSizing extends MemStoreSize {
 
   @Override
   public boolean equals(Object obj) {
-if (obj == null || !(obj instanceof MemStoreSizing)) {
+if (obj == null || (getClass() != obj.getClass())) {
   return false;
 }
 MemStoreSizing other = (MemStoreSizing) obj;



[19/23] hbase git commit: HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

2017-10-25 Thread zhangduo
HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a811fb41
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a811fb41
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a811fb41

Branch: refs/heads/HBASE-18410
Commit: a811fb410df2685253d49bcca9b4200beb279712
Parents: 1061225
Author: huzheng 
Authored: Thu Jun 8 15:58:42 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 541 ---
 .../hadoop/hbase/filter/TestFilterList.java | 148 -
 2 files changed, 471 insertions(+), 218 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a811fb41/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3ff978d..3147ab0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -90,62 +90,53 @@ final public class FilterList extends FilterBase {
   private Cell transformedCell = null;
 
   /**
-   * Constructor that takes a set of {@link Filter}s. The default operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List rowFilters) {
+reversed = checkAndGetReversed(rowFilters, reversed);
+this.filters = new ArrayList<>(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s. The default operator 
MUST_PASS_ALL is assumed.
* All filters are cloned to internal list.
* @param rowFilters list of filters
*/
   public FilterList(final List rowFilters) {
-reversed = getReversed(rowFilters, reversed);
-this.filters = new ArrayList<>(rowFilters);
-initPrevListForMustPassOne(rowFilters.size());
+this(Operator.MUST_PASS_ALL, rowFilters);
   }
 
   /**
-   * Constructor that takes a var arg number of {@link Filter}s. The fefault 
operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a var arg number of {@link Filter}s. The default 
operator MUST_PASS_ALL
+   * is assumed.
* @param rowFilters
*/
   public FilterList(final Filter... rowFilters) {
-this(Arrays.asList(rowFilters));
+this(Operator.MUST_PASS_ALL, Arrays.asList(rowFilters));
   }
 
   /**
* Constructor that takes an operator.
-   *
* @param operator Operator to process filter set with.
*/
   public FilterList(final Operator operator) {
-this.operator = operator;
-this.filters = new ArrayList<>();
-initPrevListForMustPassOne(filters.size());
-  }
-
-  /**
-   * Constructor that takes a set of {@link Filter}s and an operator.
-   *
-   * @param operator Operator to process filter set with.
-   * @param rowFilters Set of row filters.
-   */
-  public FilterList(final Operator operator, final List rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
+this(operator, new ArrayList<>());
   }
 
   /**
* Constructor that takes a var arg number of {@link Filter}s and an 
operator.
-   *
* @param operator Operator to process filter set with.
* @param rowFilters Filters to use
*/
   public FilterList(final Operator operator, final Filter... rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.length);
+this(operator, Arrays.asList(rowFilters));
   }
 
-  public void initPrevListForMustPassOne(int size) {
+  private void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
   if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
@@ -156,10 +147,8 @@ final public class FilterList extends FilterBase {
 }
   }
 
-
   /**
* Get the operator.
-   *
* @return operator
*/
   public Operator getOperator() {
@@ -168,7 +157,6 @@ final public class FilterList extends FilterBase {
 
   /**
* Get the filters.
-   *
* @return filters
*/
   public List getFilters() {
@@ -183,33 +171,22 @@ final public class FilterList extends FilterBase {
 return filters.isEmpty();
   }
 
-  private static boolean getReversed(List rowFilters, boolean 
defaultV
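
After this refactor every FilterList constructor funnels into the canonical (Operator, List) one, so callers are unaffected; a quick usage sketch (f1 and f2 are placeholder Filter instances):

    FilterList a = new FilterList(f1, f2);                               // MUST_PASS_ALL assumed
    FilterList b = new FilterList(FilterList.Operator.MUST_PASS_ONE, f1, f2);
    FilterList c = new FilterList(FilterList.Operator.MUST_PASS_ALL);    // start empty ...
    c.addFilter(f1);                                                     // ... add later

Delegation also means initPrevListForMustPassOne() now runs once per instance; the old (Operator, List) constructor invoked it both through this(rowFilters) and again directly.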

[09/23] hbase git commit: HBASE-19073 Cleanup CoordinatedStateManager

2017-10-25 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/dd70cc30/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
index d23dd1a..e420959 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
@@ -906,7 +906,7 @@ public abstract class AbstractTestWALReplay {
 assertNotNull(listStatus);
 assertTrue(listStatus.length > 0);
 WALSplitter.splitLogFile(hbaseRootDir, listStatus[0],
-this.fs, this.conf, null, null, null, mode, wals);
+this.fs, this.conf, null, null, null, null, mode, wals);
 FileStatus[] listStatus1 = this.fs.listStatus(
   new Path(FSUtils.getTableDir(hbaseRootDir, tableName), new 
Path(hri.getEncodedName(),
   "recovered.edits")), new PathFilter() {
@@ -1059,9 +1059,9 @@ public abstract class AbstractTestWALReplay {
   first = fs.getFileStatus(smallFile);
   second = fs.getFileStatus(largeFile);
 }
-WALSplitter.splitLogFile(hbaseRootDir, first, fs, conf, null, null, null,
+WALSplitter.splitLogFile(hbaseRootDir, first, fs, conf, null, null, null, 
null,
   RecoveryMode.LOG_SPLITTING, wals);
-WALSplitter.splitLogFile(hbaseRootDir, second, fs, conf, null, null, null,
+WALSplitter.splitLogFile(hbaseRootDir, second, fs, conf, null, null, null, 
null,
   RecoveryMode.LOG_SPLITTING, wals);
 WAL wal = createWAL(this.conf, hbaseRootDir, logName);
 region = HRegion.openHRegion(conf, this.fs, hbaseRootDir, hri, htd, wal);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd70cc30/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
index ebb1bf8..71b4def 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
@@ -277,11 +277,6 @@ public class TestReplicationSource {
   super(conf);
 }
 
-public ShutdownDelayRegionServer(Configuration conf, 
CoordinatedStateManager csm)
-throws IOException, InterruptedException {
-  super(conf, csm);
-}
-
 @Override
 protected void stopServiceThreads() {
   // Add a delay before service threads are shutdown.

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd70cc30/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
index 570353b..c7203b3 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
@@ -134,7 +134,7 @@ public class TestWALReaderOnSecureWAL {
   wal.sync();
   final Path walPath = AbstractFSWALProvider.getCurrentFileName(wal);
   wal.shutdown();
-  
+
   return walPath;
 } finally {
   // restore the cell codec class
@@ -182,11 +182,11 @@ public class TestWALReaderOnSecureWAL {
 }
 
 FileStatus[] listStatus = fs.listStatus(walPath.getParent());
-RecoveryMode mode = 
(conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ? 
+RecoveryMode mode = 
(conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ?
 RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING);
 Path rootdir = FSUtils.getRootDir(conf);
 try {
-  WALSplitter s = new WALSplitter(wals, conf, rootdir, fs, null, null, 
mode);
+  WALSplitter s = new WALSplitter(wals, conf, rootdir, fs, null, null, 
null, mode);
   s.splitLogFile(listStatus[0], null);
   Path file = new Path(ZKSplitLog.getSplitLogDir(rootdir, 
listStatus[0].getPath().getName()),
 "corrupt");
@@ -229,11 +229,11 @@ public class TestWALReaderOnSecureWAL {
 }
 
 FileStatus[] listStatus = fs.listStatus(walPath.getParent());
-RecoveryMode mode = 
(conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ? 
+RecoveryMode mode = 
(conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ?
 RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING);
 Path rootdir = FS

[08/23] hbase git commit: HBASE-19054 switch precommit image to one from maven

2017-10-25 Thread zhangduo
HBASE-19054 switch precommit image to one from maven

Amending Author: Mike Drob 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eee3b018
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eee3b018
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eee3b018

Branch: refs/heads/HBASE-18410
Commit: eee3b0180ead73c09b33f9583bfee9c01bc3aed2
Parents: 9353c59
Author: Misty Stanley-Jones 
Authored: Tue Oct 24 14:50:23 2017 -0500
Committer: Mike Drob 
Committed: Tue Oct 24 14:51:16 2017 -0500

--
 dev-support/docker/Dockerfile | 143 -
 1 file changed, 15 insertions(+), 128 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eee3b018/dev-support/docker/Dockerfile
--
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index da5f32e..49ad14d 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -1,4 +1,3 @@
-
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -18,142 +17,30 @@
 # Dockerfile for installing the necessary dependencies for building Hadoop.
 # See BUILDING.txt.
 
+FROM maven:3.5-jdk-8
 
-FROM ubuntu:trusty
-
-WORKDIR /root
-
-ENV DEBIAN_FRONTEND noninteractive
-ENV DEBCONF_TERSE true
-
-##
-# Install common dependencies from packages
-#
-# WARNING: DO NOT PUT JAVA APPS HERE! Otherwise they will install default
-# Ubuntu Java.  See Java section below!
-##
-RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
-build-essential \
-bzip2 \
-cmake \
-curl \
-doxygen \
-fuse \
-g++ \
-gcc \
-git \
-gnupg-agent \
-make \
-libbz2-dev \
-libcurl4-openssl-dev \
-libfuse-dev \
-libperl-critic-perl \
-libprotobuf-dev \
-libprotoc-dev \
-libsnappy-dev \
-libssl-dev \
-libtool \
-pinentry-curses \
-pkg-config \
-protobuf-compiler \
-protobuf-c-compiler \
-python \
-python2.7 \
-python-pip \
-rsync \
-snappy \
-zlib1g-dev \
-wget
-
-###
-# OpenJDK 8
-###
-
-RUN echo "dot_style = mega" > "/root/.wgetrc"
-RUN echo "quiet = on" >> "/root/.wgetrc"
-
-RUN apt-get -q update && apt-get -q install --no-install-recommends -y 
software-properties-common
-RUN add-apt-repository -y ppa:openjdk-r/ppa
-RUN apt-get -q update
-RUN apt-get -q install --no-install-recommends -y openjdk-8-jdk
-RUN update-alternatives --config java
-RUN update-alternatives --config javac
-
-
-# Apps that require Java
-###
 RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
-ant \
-maven
-
-# Fixing the Apache commons / Maven dependency problem under Ubuntu:
-# See http://wiki.apache.org/commons/VfsProblems
-RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang3-3.6.jar .
+   bats \
+   findbugs \
+   libperl-critic-perl \
+   pylint \
+   python-dateutil \
+   rsync \
+   ruby \
+   shellcheck \
+   wget \
+   && \
+gem install rake rubocop ruby-lint
 
-##
-# Install findbugs
-##
-RUN mkdir -p /opt/findbugs && \
-curl -L -s -S \
- 
https://sourceforge.net/projects/findbugs/files/findbugs/3.0.1/findbugs-noUpdateChecks-3.0.1.tar.gz/download
 \
- -o /opt/findbugs.tar.gz && \
-tar xzf /opt/findbugs.tar.gz --strip-components 1 -C /opt/findbugs
-ENV FINDBUGS_HOME /opt/findbugs
-
-
-# Install shellcheck
-
-RUN apt-get -q install -y cabal-install
-RUN mkdir /root/.cabal
-RUN echo "remote-repo: hackage.fpcomplete.com:http://hackage.fpcomplete.com/"; 
>> /root/.cabal/config
-#RUN echo "remote-repo: hackage.haskell.org:http://hackage.haskell.org/"; > 
/root/.cabal/config
-RUN echo "remote-repo-cache: /root/.cabal/packages" >> /root/.cabal/config
-RUN cabal update
-RUN cabal install shellcheck --global
-
-
-# Install bats
-
-RUN add-apt-repository -y ppa:duggan/bats
-RUN apt-get -q update
-RUN apt-get -q install --no-install-recommends -y bats
-
-
-# Install pylint
-
-RUN pip install pylint
-
-
-# Install dateutil.parser
-
-RUN pip install python-dateutil
-
-
-# Install Ruby 2, based on Yetus 0.4.0 dockerfile
-###
-RUN echo 'gem: --no-rdoc --no-ri' >> /root/.gemrc
-RUN apt-add-repository ppa:brightbox/ruby-ng
-RUN apt-get -q update
-
-RUN apt-get -q install --no-install-recommends -y ruby2.2 ruby-switch
-RUN ruby-switch --set ruby2.2
-
-
-# Install rubocop
-###
-RUN gem install rake
-RUN gem install rubocop
-
-
-# Install ruby-lint
-###
-RUN gem install ruby-lint
+ENV FINDBUGS_HOME /usr
 
 ###
 # Avoid out of m

[14/23] hbase git commit: HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT

2017-10-25 Thread zhangduo
HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL 
FilterList return SEEK_NEXT_USING_HINT


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/10612254
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/10612254
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/10612254

Branch: refs/heads/HBASE-18410
Commit: 10612254a163d39e0ad70af683fe1890125c41ed
Parents: 5716e8d
Author: tedyu 
Authored: Thu Sep 7 04:07:09 2017 -0700
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../main/java/org/apache/hadoop/hbase/filter/FilterList.java| 5 +++--
 .../java/org/apache/hadoop/hbase/filter/TestFilterList.java | 4 ++--
 2 files changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/10612254/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 83db1f2..3ff978d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -28,12 +28,13 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters

http://git-wip-us.apache.org/repos/asf/hbase/blob/10612254/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 46d44de..e414729 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -502,8 +502,8 @@ public class TestFilterList {
 // Should take the min if given two hints
 FilterList filterList = new FilterList(Operator.MUST_PASS_ONE,
 Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } ));
-assertEquals(0,
-  CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), 
minKeyValue));
+assertEquals(0, 
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null),
+  minKeyValue));
 
 // Should have no hint if any filter has no hint
 filterList = new FilterList(Operator.MUST_PASS_ONE,
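
The idea behind HBASE-15410, sketched: under MUST_PASS_ALL, when every sub-filter answers SEEK_NEXT_USING_HINT it is safe to seek to the largest of the hints, because any cell before some filter's hint would be rejected by that filter anyway (the MUST_PASS_ONE case in the test above takes the minimum instead). A hedged fragment for picking the maximum hint; filters and currentCell are assumed to be in scope:

    Cell maxHint = null;
    for (Filter filter : filters) {
      Cell hint = filter.getNextCellHint(currentCell);
      if (hint == null) {
        continue;                          // a filter without a hint contributes nothing
      }
      if (maxHint == null || CellComparatorImpl.COMPARATOR.compare(hint, maxHint) > 0) {
        maxHint = hint;                    // keep the furthest (largest) hint
      }
    }
    // maxHint, if non-null, is the cell the whole MUST_PASS_ALL list should seek to.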



[22/23] hbase git commit: HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()

2017-10-25 Thread zhangduo
HBASE-18904 Missing break in NEXT_ROW case of 
FilterList#mergeReturnCodeForOrOperator()

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6442d30f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6442d30f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6442d30f

Branch: refs/heads/HBASE-18410
Commit: 6442d30f0f3b4bec791dfa2ebc0cad5938b3b18c
Parents: a811fb4
Author: Biju Nair 
Authored: Fri Sep 29 16:55:54 2017 -0400
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6442d30f/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3147ab0..b518645 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,6 +566,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
 return ReturnCode.NEXT_ROW;
   }
+  break;
 case SEEK_NEXT_USING_HINT:
   if (isInReturnCodes(rc, ReturnCode.INCLUDE, 
ReturnCode.INCLUDE_AND_NEXT_COL,
 ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -577,6 +578,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
 return ReturnCode.SEEK_NEXT_USING_HINT;
   }
+  break;
 }
 throw new IllegalStateException(
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);


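Why the two added break statements matter: Java switch cases fall through, so a case that checks a condition and does not return would otherwise continue into the next case's checks and could merge to the wrong code. A self-contained illustration with a generic enum (not the HBase types):

    final class MergeDemo {
      enum Code { INCLUDE, NEXT_ROW, SEEK_NEXT_USING_HINT }

      static Code merge(Code localRC, Code rc) {
        switch (localRC) {
          case NEXT_ROW:
            if (rc == Code.NEXT_ROW) {
              return Code.NEXT_ROW;
            }
            break;   // the fix: without this, control falls into the next case below
          case SEEK_NEXT_USING_HINT:
            if (rc == Code.SEEK_NEXT_USING_HINT) {
              return Code.SEEK_NEXT_USING_HINT;
            }
            break;
          default:
            break;
        }
        return Code.INCLUDE;
      }
    }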

[05/23] hbase git commit: HBASE-19070 temporarily make the mvnsite nightly test non-voting.

2017-10-25 Thread zhangduo
HBASE-19070 temporarily make the mvnsite nightly test non-voting.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cda2949b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cda2949b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cda2949b

Branch: refs/heads/HBASE-18410
Commit: cda2949b816fceb4d63d2d9ca445bc75912227e2
Parents: cb506fd
Author: Sean Busbey 
Authored: Tue Oct 24 10:30:00 2017 -0500
Committer: Sean Busbey 
Committed: Tue Oct 24 12:06:58 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cda2949b/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 28efa61..05c8895 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -49,7 +49,7 @@ pipeline {
 // output from surefire; sadly the archive function in yetus only works on 
file names.
 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt'
 // These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
-TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
 BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
 EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
   }



[03/23] hbase git commit: HBASE-19021 Restore a few important missing logics for balancer in 2.0

2017-10-25 Thread zhangduo
HBASE-19021 Restore a few important missing logics for balancer in 2.0

Signed-off-by: Jerry He 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9716f62f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9716f62f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9716f62f

Branch: refs/heads/HBASE-18410
Commit: 9716f62f43195ef024ac7a4bafb93a4716a7323e
Parents: 1c1906e
Author: Jerry He 
Authored: Tue Oct 24 07:53:17 2017 -0700
Committer: Jerry He 
Committed: Tue Oct 24 07:53:17 2017 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java |  9 +---
 .../hbase/master/assignment/RegionStates.java   |  8 +++
 .../master/procedure/ServerCrashProcedure.java  |  1 +
 .../hadoop/hbase/TestRegionRebalancing.java | 24 
 4 files changed, 35 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9716f62f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 8f2ae6b..bb36520 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1421,16 +1421,19 @@ public class HMaster extends HRegionServer implements 
MasterServices {
 }
   }
 
+  boolean isByTable = 
getConfiguration().getBoolean("hbase.master.loadbalance.bytable", false);
   Map>> assignmentsByTable =
-this.assignmentManager.getRegionStates().getAssignmentsByTable();
+
this.assignmentManager.getRegionStates().getAssignmentsByTable(!isByTable);
 
   List plans = new ArrayList<>();
 
   //Give the balancer the current cluster state.
   this.balancer.setClusterStatus(getClusterStatus());
-  this.balancer.setClusterLoad(
-  
this.assignmentManager.getRegionStates().getAssignmentsByTable());
+  this.balancer.setClusterLoad(assignmentsByTable);
 
+  for (Map> serverMap : 
assignmentsByTable.values()) {
+
serverMap.keySet().removeAll(this.serverManager.getDrainingServersList());
+  }
   for (Entry>> e : 
assignmentsByTable.entrySet()) {
 List partialPlans = 
this.balancer.balanceCluster(e.getKey(), e.getValue());
 if (partialPlans != null) plans.addAll(partialPlans);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9716f62f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
index c13a49d..3b58fe2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
@@ -756,6 +756,14 @@ public class RegionStates {
 
   serverResult.add(node.getRegionInfo());
 }
+// Add online servers with no assignment for the table.
+for (Map> table: result.values()) {
+for (ServerName svr : serverMap.keySet()) {
+  if (!table.containsKey(svr)) {
+table.put(svr, new ArrayList());
+  }
+}
+}
 return result;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9716f62f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index a0ee628..56efaeb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -172,6 +172,7 @@ implements ServerProcedureInterface {
 break;
 
   case SERVER_CRASH_FINISH:
+
services.getAssignmentManager().getRegionStates().removeServer(serverName);
 services.getServerManager().getDeadServers().finish(serverName);
 return Flow.NO_MORE_STATE;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9716f62f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java 
b/hbase-

[16/23] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE leads to redundant cells returned - addendum

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE leads to redundant cells returned - addendum

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5716e8d9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5716e8d9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5716e8d9

Branch: refs/heads/HBASE-18410
Commit: 5716e8d9ed5d3cc86ee6e03c57c1ce46cf18fa82
Parents: 4591098
Author: huzheng 
Authored: Wed Jun 7 14:49:29 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/FilterList.java | 12 ++--
 1 file changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5716e8d9/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3493082..83db1f2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -145,7 +146,7 @@ final public class FilterList extends FilterBase {
 
   public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevCellList == null) {
+  if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
   }
   if (this.prevCellList == null) {
@@ -407,7 +408,14 @@ final public class FilterList extends FilterBase {
 ReturnCode localRC = filter.filterKeyValue(c);
 // Update previous cell and return code we encountered.
 prevFilterRCList.set(i, localRC);
-prevCellList.set(i, c);
+if (c == null || localRC == ReturnCode.INCLUDE || localRC == 
ReturnCode.SKIP) {
+  // If previous return code is INCLUDE or SKIP, we should always pass 
the next cell to the
+  // corresponding sub-filter(need not test 
shouldPassCurrentCellToFilter() method), So we
+  // need not save current cell to prevCellList for saving heap memory.
+  prevCellList.set(i, null);
+} else {
+  prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
+}
 
 if (localRC != ReturnCode.SEEK_NEXT_USING_HINT) {
   seenNonHintReturnCode = true;



[11/23] hbase git commit: HBASE-18754 (addendum) close the input resource

2017-10-25 Thread zhangduo
HBASE-18754 (addendum) close the input resource


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2e648138
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2e648138
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2e648138

Branch: refs/heads/HBASE-18410
Commit: 2e6481386b80c2306e0e12a9c7cd137b63eaa006
Parents: dd70cc3
Author: Chia-Ping Tsai 
Authored: Wed Oct 25 12:31:37 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Oct 25 12:31:37 2017 +0800

--
 .../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java  | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2e648138/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
index 08d9853..203bea3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
@@ -199,8 +199,9 @@ public abstract class TimeRangeTracker {
   ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
   return TimeRangeTracker.create(type, builder.getFrom(), builder.getTo());
 } else {
-  DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
-  return TimeRangeTracker.create(type, in.readLong(), in.readLong());
+  try (DataInputStream in = new DataInputStream(new 
ByteArrayInputStream(data))) {
+return TimeRangeTracker.create(type, in.readLong(), in.readLong());
+  }
 }
   }
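
The addendum above is a straight try-with-resources conversion; a generic, self-contained version of the same pattern (class and method names are illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    final class ReadTwoLongs {
      static long[] read(byte[] data) throws IOException {
        // The stream is closed automatically, even if readLong() throws.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(data))) {
          return new long[] { in.readLong(), in.readLong() };
        }
      }
    }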
 



[07/23] hbase git commit: HBASE-19018 tests that need bouncycastle must declare dependency on it.

2017-10-25 Thread zhangduo
HBASE-19018 tests that need bouncycastle must declare dependency on it.

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9353c59a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9353c59a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9353c59a

Branch: refs/heads/HBASE-18410
Commit: 9353c59a99f914716a227a1a7a2d099f92e0d06c
Parents: 3cced09
Author: Sean Busbey 
Authored: Mon Oct 16 12:44:11 2017 -0500
Committer: Sean Busbey 
Committed: Tue Oct 24 14:30:03 2017 -0500

--
 hbase-endpoint/pom.xml | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9353c59a/hbase-endpoint/pom.xml
--
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index ef2584c..a127763 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -216,6 +216,12 @@
   mockito-all
   test
 
+
+
+  org.bouncycastle
+  bcprov-jdk16
+  test
+
   
   
 



[10/23] hbase git commit: HBASE-19073 Cleanup CoordinatedStateManager

2017-10-25 Thread zhangduo
HBASE-19073 Cleanup CoordinatedStateManager

- Merged BaseCSM class into CSM interface
- Removed config hbase.coordinated.state.manager.class
- Since the state manager is not pluggable anymore, we don't need
start/stop/initialize to set up unknown classes. Our internal ZkCSM now requires
the Server in its constructor, which makes the dependency clearer too.
- Removed CSM from the HRegionServer and HMaster constructors. Although it's a step
back from dependency injection, it's more consistent with our current (not
good) pattern where we initialize everything in the constructor itself.

Change-Id: Ifca06bb354adec5b11ea1bad4707e014410491fc


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dd70cc30
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dd70cc30
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dd70cc30

Branch: refs/heads/HBASE-18410
Commit: dd70cc308158c435c6d8ec027e2435a29be4326b
Parents: eee3b01
Author: Apekshit Sharma 
Authored: Fri Oct 20 17:20:17 2017 -0700
Committer: Apekshit Sharma 
Committed: Tue Oct 24 19:56:04 2017 -0700

--
 .../master/LogRollMasterProcedureManager.java   |  9 +--
 .../LogRollRegionServerProcedureManager.java| 12 ++--
 .../org/apache/hadoop/hbase/HConstants.java |  6 +-
 .../hadoop/hbase/CoordinatedStateManager.java   | 31 
 .../hbase/CoordinatedStateManagerFactory.java   | 48 -
 .../apache/hadoop/hbase/LocalHBaseCluster.java  | 16 +
 .../BaseCoordinatedStateManager.java| 76 
 .../SplitLogManagerCoordination.java|  4 +-
 .../ZKSplitLogManagerCoordination.java  |  9 +--
 .../coordination/ZkCoordinatedStateManager.java | 21 +++---
 .../ZkSplitLogWorkerCoordination.java   |  9 +--
 .../org/apache/hadoop/hbase/master/HMaster.java | 12 ++--
 .../hadoop/hbase/master/HMasterCommandLine.java | 10 +--
 .../hadoop/hbase/master/SplitLogManager.java|  4 +-
 .../hbase/regionserver/HRegionServer.java   | 29 +++-
 .../regionserver/HRegionServerCommandLine.java  |  5 +-
 .../hbase/regionserver/SplitLogWorker.java  | 15 ++--
 .../hadoop/hbase/util/JVMClusterUtil.java   | 23 ++
 .../apache/hadoop/hbase/wal/WALSplitter.java| 49 ++---
 .../apache/hadoop/hbase/MiniHBaseCluster.java   |  4 +-
 .../hadoop/hbase/TestLocalHBaseCluster.java | 10 ++-
 .../hadoop/hbase/TestMovedRegionsCleaner.java   |  5 +-
 .../client/TestClientScannerRPCTimeout.java |  6 +-
 .../hadoop/hbase/client/TestMetaCache.java  |  4 +-
 .../master/TestDistributedLogSplitting.java |  5 +-
 .../hbase/master/TestHMasterRPCException.java   |  5 +-
 .../hadoop/hbase/master/TestMasterMetrics.java  |  5 +-
 .../hbase/master/TestMasterNoCluster.java   | 17 ++---
 .../hbase/master/TestMetaShutdownHandler.java   |  5 +-
 .../hbase/master/TestSplitLogManager.java   |  6 +-
 .../hbase/regionserver/OOMERegionServer.java|  5 +-
 .../hbase/regionserver/TestClusterId.java   | 10 +--
 .../TestCompactionInDeadRegionServer.java   |  4 --
 .../hbase/regionserver/TestPriorityRpc.java |  5 +-
 .../TestRSKilledWhenInitializing.java   |  4 +-
 .../TestRegionMergeTransactionOnCluster.java|  6 +-
 .../regionserver/TestRegionServerHostname.java  |  2 +-
 .../TestRegionServerReportForDuty.java  |  5 +-
 .../TestScannerHeartbeatMessages.java   |  5 --
 .../hbase/regionserver/TestSplitLogWorker.java  |  5 +-
 .../TestSplitTransactionOnCluster.java  |  6 +-
 .../regionserver/wal/AbstractTestWALReplay.java |  6 +-
 .../replication/TestReplicationSource.java  |  5 --
 .../hbase/wal/TestWALReaderOnSecureWAL.java | 10 +--
 .../apache/hadoop/hbase/wal/TestWALSplit.java   | 14 ++--
 src/main/asciidoc/_chapters/hbase-default.adoc  | 11 ---
 46 files changed, 158 insertions(+), 405 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dd70cc30/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.java
index 62b2df7..567d5ec 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.java
@@ -26,12 +26,12 @@ import java.util.concurrent.ThreadPoolExecutor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CoordinatedStateManagerFactory;
 import org.ap
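
A generic, self-contained sketch of the dependency-style change described in the message above (names are illustrative; the real classes are CoordinatedStateManager, ZkCoordinatedStateManager and Server):

    final class ZkStateManager {
      private final Object server;   // stands in for the mandatory Server collaborator

      // Before: the manager class came from configuration, was reflectively
      // instantiated, and only later got initialize(server)/start() calls.
      // After: the one implementation requires its collaborator up front.
      ZkStateManager(Object server) {
        this.server = java.util.Objects.requireNonNull(server, "server");
      }
    }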

[21/23] hbase git commit: HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not work

2017-10-25 Thread zhangduo
HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not 
work

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fcaf71d2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fcaf71d2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fcaf71d2

Branch: refs/heads/HBASE-18410
Commit: fcaf71d20687e350d9357e1a303da6caf4c6ac08
Parents: 8a5114b
Author: huzheng 
Authored: Tue Oct 17 19:25:23 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../org/apache/hadoop/hbase/filter/Filter.java  | 10 +---
 .../hadoop/hbase/filter/FilterListWithOR.java   | 10 ++--
 .../hadoop/hbase/filter/TestFilterList.java | 26 
 .../hbase/filter/TestFilterListOnMini.java  |  7 +++---
 4 files changed, 44 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fcaf71d2/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 70c68b6..a92ea0b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -172,8 +172,12 @@ public abstract class Filter {
  */
 NEXT_COL,
 /**
- * Done with columns, skip to next row. Note that filterRow() will
- * still be called.
+ * Seek to next row in current family. It may still pass a cell whose 
family is different but
+ * row is the same as previous cell to {@link #filterKeyValue(Cell)} , 
even if we get a NEXT_ROW
+ * returned for previous cell. For more details see HBASE-18368. 
+ * Once reset() method was invoked, then we switch to the next row for all 
family, and you can
+ * catch the event by invoking CellUtils.matchingRows(previousCell, 
currentCell). 
+ * Note that filterRow() will still be called. 
  */
 NEXT_ROW,
 /**
@@ -181,7 +185,7 @@ public abstract class Filter {
  */
 SEEK_NEXT_USING_HINT,
 /**
- * Include KeyValue and done with row, seek to next.
+ * Include KeyValue and done with row, seek to next. See NEXT_ROW.
  */
 INCLUDE_AND_SEEK_NEXT_ROW,
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/fcaf71d2/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
index bac9023..31e2a55 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
@@ -74,7 +74,12 @@ public class FilterListWithOR extends FilterListBase {
* as the previous cell even if filter-A has NEXT_COL returned for the 
previous cell. So we should
* save the previous cell and the return code list when checking previous 
cell for every filter in
* filter list, and verify if currentCell fit the previous return code, if 
fit then pass the
-   * currentCell to the corresponding filter. (HBASE-17678)
+   * currentCell to the corresponding filter. (HBASE-17678) 
+   * Note that: In StoreScanner level, NEXT_ROW will skip to the next row in 
current family, and in
+   * RegionScanner level, NEXT_ROW will skip to the next row in current family 
and switch to the
+   * next family for RegionScanner, INCLUDE_AND_NEXT_ROW is the same. so we 
should pass current cell
+   * to the filter, if row mismatch or row match but column family mismatch. 
(HBASE-18368)
+   * @see org.apache.hadoop.hbase.filter.Filter.ReturnCode
*/
   private boolean shouldPassCurrentCellToFilter(Cell prevCell, Cell 
currentCell, int filterIdx)
   throws IOException {
@@ -94,7 +99,8 @@ public class FilterListWithOR extends FilterListBase {
   return !CellUtil.matchingRowColumn(prevCell, currentCell);
 case NEXT_ROW:
 case INCLUDE_AND_SEEK_NEXT_ROW:
-  return !CellUtil.matchingRows(prevCell, currentCell);
+  return !CellUtil.matchingRows(prevCell, currentCell)
+  || !CellUtil.matchingFamily(prevCell, currentCell);
 default:
   throw new IllegalStateException("Received code is not valid.");
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/fcaf71d2/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java
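
A usage sketch of the scenario HBASE-18368 fixes: two FamilyFilters OR'ed together on a scan that reads both families. Filter classes are from org.apache.hadoop.hbase.filter; the CompareOperator-based constructor is assumed from the 2.0-era API.

    Filter cf1Only = new FamilyFilter(CompareOperator.EQUAL,
        new BinaryComparator(Bytes.toBytes("cf1")));
    Filter cf2Only = new FamilyFilter(CompareOperator.EQUAL,
        new BinaryComparator(Bytes.toBytes("cf2")));
    Scan scan = new Scan();
    scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ONE, cf1Only, cf2Only));
    // Expectation: matching rows return cells from both cf1 and cf2. Before the fix,
    // a NEXT_ROW remembered for one branch could suppress the other family's cells in
    // the same row; the matchingFamily check added above restores them.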

[04/10] hbase git commit: HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature branch.

2017-10-25 Thread zhangduo
HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature 
branch.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4e1c7208
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4e1c7208
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4e1c7208

Branch: refs/heads/master
Commit: 4e1c7208cbbe27a926a192a9e9526c0d02d0c516
Parents: 43a8ac0
Author: Sean Busbey 
Authored: Mon Oct 9 15:24:00 2017 -0500
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java   | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4e1c7208/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
index dd2399f..590b26e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -58,6 +58,7 @@ public class TestFilterListOnMini {
 TEST_UTIL.shutdownMiniCluster();
   }
 
+  @Ignore("HBASE-18410 Should not merge without this test running.")
   @Test
   public void testFiltersWithOR() throws Exception {
 TableName tn = TableName.valueOf(name.getMethodName());



[01/10] hbase git commit: HBASE-19057 Fix other code review comments about FilterList improvement

2017-10-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 43a8ac001 -> 705b3fa98


HBASE-19057 Fix other code review comments about FilterList improvement


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/705b3fa9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/705b3fa9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/705b3fa9

Branch: refs/heads/master
Commit: 705b3fa98c97806c7eba63617a99f62d829400d1
Parents: fcaf71d
Author: huzheng 
Authored: Tue Oct 24 15:30:55 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 24 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 16 --
 .../hadoop/hbase/filter/FilterListWithAND.java  | 12 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 31 +++-
 .../hadoop/hbase/filter/TestFilterList.java |  8 +++--
 5 files changed, 59 insertions(+), 32 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/705b3fa9/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index e87f1b3..d4242ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,17 +21,12 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.yetus.audience.InterfaceAudience;
 
 import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -170,8 +165,23 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
-  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
-return this.filterListBase.internalFilterKeyValue(c, 
currentTransformedCell);
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}. Compared to the
+   * {@link #filterKeyValue(Cell)} method, this method accepts an additional 
parameter named
+   * transformedCell. This parameter indicates the initial value of 
transformed cell before this
+   * filter operation. 
+   * For FilterList, we can consider a filter list as a node in a tree. 
sub-filters of the filter
+   * list are children of the relative node. The logic of transforming cell of 
a filter list, well,
+   * we can consider it as the process of post-order tree traverse. For a node 
, Before we traverse
+   * the current child, we should set the traverse result (transformed cell) 
of previous node(s) as
+   * the initial value. so the additional currentTransformedCell parameter is 
needed (HBASE-18879).
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOException
+   */
+  ReturnCode internalFilterKeyValue(Cell c, Cell transformedCell) throws 
IOException {
+return this.filterListBase.internalFilterKeyValue(c, transformedCell);
   }
 
   @Override
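
A minimal sketch of the tree analogy in the javadoc above (the filter choices and column prefixes are illustrative assumptions, not taken from this commit): the root list's earlier child transforms the cell, and that transformed cell is what gets handed to the nested list as its starting point.

import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class NestedFilterListSketch {
  public static Filter buildTree() {
    // Leaves of the tree: KeyOnlyFilter transforms every included cell by
    // stripping its value; the prefix filters only select columns.
    Filter keyOnly = new KeyOnlyFilter();
    Filter prefixA = new ColumnPrefixFilter(Bytes.toBytes("a"));
    Filter prefixB = new ColumnPrefixFilter(Bytes.toBytes("b"));

    // Inner node: an OR list of the two prefix filters.
    FilterList inner = new FilterList(FilterList.Operator.MUST_PASS_ONE, prefixA, prefixB);

    // Root node: an AND list whose children are keyOnly and the inner list.
    // When the root asks the inner list to filter a cell, the cell has already
    // been transformed by keyOnly; that is the transformedCell argument the
    // javadoc above describes being threaded through internalFilterKeyValue.
    return new FilterList(FilterList.Operator.MUST_PASS_ALL, keyOnly, inner);
  }
}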

http://git-wip-us.apache.org/repos/asf/hbase/blob/705b3fa9/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 60b0dc1..f92d2e7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,8 +107,20 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
-  abstract ReturnCode internalFilterKeyValue(Cell c, Cell 
currentTransformedCell)
-  throws IOException;
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOExce

[08/10] hbase git commit: HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()

2017-10-25 Thread zhangduo
HBASE-18904 Missing break in NEXT_ROW case of 
FilterList#mergeReturnCodeForOrOperator()

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6442d30f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6442d30f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6442d30f

Branch: refs/heads/master
Commit: 6442d30f0f3b4bec791dfa2ebc0cad5938b3b18c
Parents: a811fb4
Author: Biju Nair 
Authored: Fri Sep 29 16:55:54 2017 -0400
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6442d30f/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3147ab0..b518645 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,6 +566,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
 return ReturnCode.NEXT_ROW;
   }
+  break;
 case SEEK_NEXT_USING_HINT:
   if (isInReturnCodes(rc, ReturnCode.INCLUDE, 
ReturnCode.INCLUDE_AND_NEXT_COL,
 ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -577,6 +578,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
 return ReturnCode.SEEK_NEXT_USING_HINT;
   }
+  break;
 }
 throw new IllegalStateException(
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);



[02/10] hbase git commit: HBASE-18879 HBase FilterList cause KeyOnlyFilter not work

2017-10-25 Thread zhangduo
HBASE-18879 HBase FilterList cause KeyOnlyFilter not work


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8a5114b4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8a5114b4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8a5114b4

Branch: refs/heads/master
Commit: 8a5114b4fa079ff165ba735691eb4d99e3b5d7ca
Parents: 5927510
Author: huzheng 
Authored: Wed Oct 11 21:17:03 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  6 +++
 .../hadoop/hbase/filter/FilterListBase.java |  3 ++
 .../hadoop/hbase/filter/FilterListWithAND.java  | 22 +
 .../hadoop/hbase/filter/FilterListWithOR.java   | 22 +
 .../hadoop/hbase/filter/TestFilterList.java | 48 
 5 files changed, 85 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8a5114b4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 97392d1..e87f1b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -72,6 +72,8 @@ final public class FilterList extends FilterBase {
   filterListBase = new FilterListWithAND(filters);
 } else if (operator == Operator.MUST_PASS_ONE) {
   filterListBase = new FilterListWithOR(filters);
+} else {
+  throw new IllegalArgumentException("Invalid operator: " + operator);
 }
 this.operator = operator;
   }
@@ -168,6 +170,10 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
+return this.filterListBase.internalFilterKeyValue(c, 
currentTransformedCell);
+  }
+
   @Override
   public ReturnCode filterKeyValue(Cell c) throws IOException {
 return filterListBase.filterKeyValue(c);
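
The symptom behind this change is easiest to see from the client side. A minimal sketch (the column prefix is an assumption for illustration): a KeyOnlyFilter nested inside a FilterList should behave like a bare KeyOnlyFilter, returning every cell with an empty value.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyOnlyInFilterListSketch {
  public static Scan buildScan() {
    // Before this fix the value-stripping transform of KeyOnlyFilter could be
    // lost once the filter was wrapped in a FilterList, so full values came back.
    FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new ColumnPrefixFilter(Bytes.toBytes("q")), new KeyOnlyFilter());
    Scan scan = new Scan();
    scan.setFilter(list);
    return scan;
  }
}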

http://git-wip-us.apache.org/repos/asf/hbase/blob/8a5114b4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 7fa0245..60b0dc1 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,6 +107,9 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
+  abstract ReturnCode internalFilterKeyValue(Cell c, Cell 
currentTransformedCell)
+  throws IOException;
+
   /**
* Filters that never filter by modifying the returned List of Cells can 
inherit this
* implementation that does nothing. {@inheritDoc}

http://git-wip-us.apache.org/repos/asf/hbase/blob/8a5114b4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
index fa979c0..4909dfd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
@@ -147,16 +147,26 @@ public class FilterListWithAND extends FilterListBase {
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);
   }
 
-  private ReturnCode filterKeyValueWithMustPassAll(Cell c) throws IOException {
+  @Override
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
+if (isEmpty()) {
+  return ReturnCode.INCLUDE;
+}
 ReturnCode rc = ReturnCode.INCLUDE;
-Cell transformed = c;
+Cell transformed = currentTransformedCell;
+this.referenceCell = c;
 this.seekHintFilter.clear();
 for (int i = 0, n = filters.size(); i < n; i++) {
   Filter filter = filters.get(i);
   if (filter.filterAllRemaining()) {
 return ReturnCode.NEXT_ROW;
   }
-  ReturnCode localRC = filter.filterKeyValue(c);
+  ReturnCode localRC;
+  if (filter instanceof FilterList) {
+localRC = ((FilterList) filter).internalFilterKeyValue(c, transformed);
+  } else {
+localRC = filter.filterKeyValue(c);
+ 

[10/10] hbase git commit: HBASE-18411 Dividing FilterList into two separate sub-classes: FilterListWithOR, FilterListWithAND

2017-10-25 Thread zhangduo
HBASE-18411 Dividing FilterList into two separate sub-classes: FilterListWithOR, FilterListWithAND

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59275100
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59275100
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59275100

Branch: refs/heads/master
Commit: 59275100e1d7889e73169ab0d0a70c710e260b7a
Parents: 6442d30
Author: huzheng 
Authored: Tue Oct 10 20:01:48 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 661 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 159 +
 .../hadoop/hbase/filter/FilterListWithAND.java  | 273 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 383 +++
 .../hadoop/hbase/filter/TestFilterList.java |  89 +++
 5 files changed, 962 insertions(+), 603 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/59275100/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index b518645..97392d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -37,86 +38,60 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
 
 /**
- * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator {@link 
Operator#MUST_PASS_ALL}
- * (AND) or {@link Operator#MUST_PASS_ONE} (OR).
- * Since you can use Filter Lists as children of Filter Lists, you can create a
- * hierarchy of filters to be evaluated.
- *
- * 
- * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does
- * not include the KeyValue.
- *
- * 
- * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated.
- *
- * 
+ * Implementation of {@link Filter} that represents an ordered List of Filters 
which will be
+ * evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} 
(AND) or
+ * {@link Operator#MUST_PASS_ONE} (OR). Since you can use Filter 
Lists as children of
+ * Filter Lists, you can create a hierarchy of filters to be evaluated. 
+ * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does not
+ * include the KeyValue. 
+ * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated. 
  * Defaults to {@link Operator#MUST_PASS_ALL}.
  */
 @InterfaceAudience.Public
 final public class FilterList extends FilterBase {
+
   /** set operator */
   @InterfaceAudience.Public
-  public static enum Operator {
+  public enum Operator {
 /** !AND */
 MUST_PASS_ALL,
 /** !OR */
 MUST_PASS_ONE
   }
 
-  private static final int MAX_LOG_FILTERS = 5;
-  private Operator operator = Operator.MUST_PASS_ALL;
-  private final List filters;
-  private Collection seekHintFilters = new ArrayList();
-
-  /**
-   * Save previous return code and previous cell for every filter in filter 
list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current 
cell encountered to
-   * the filter. For MUST_PASS_ALL, the two list are meaningless.
-   */
-  private List prevFilterRCList = null;
-  private List prevCellList = null;
-
-  /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
-  private Cell referenceCell = null;
-
-  /**
-   * When filtering a given Cell in {@link #filterKeyValue(Cell)},
-   * this stores the transformed Cell to be returned by {@link 
#transformCell(Cell)}.
-   *
-   * Individual filters transformation are applied only when the filter 
includes the Cell.
-   * Transformations are composed in the order specified by {@link #filters}.
-   */
-  private Cell transformedCell = null;
+  private Operator operator;
+  private FilterListBase filterListBase;
 
   /**
* Constructor that takes a set of {@link Filter}s and an operator.
* @param operator Operator to process filter set with.
-   * @param 
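
The class javadoc kept in this hunk states that MUST_PASS_ALL evaluates lazily while MUST_PASS_ONE evaluates every filter. A small experiment to observe that, using a counting stub filter (the stub and the expected counts are assumptions derived from that javadoc, not from the patch itself):

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class OperatorLazinessDemo {
  static class CountingFilter extends FilterBase {
    final ReturnCode answer;
    int calls;
    CountingFilter(ReturnCode answer) { this.answer = answer; }
    @Override
    public ReturnCode filterKeyValue(Cell c) throws IOException {
      calls++;
      return answer;
    }
  }

  public static void main(String[] args) throws IOException {
    Cell cell = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));

    CountingFilter excludes = new CountingFilter(ReturnCode.NEXT_ROW);
    CountingFilter neverAsked = new CountingFilter(ReturnCode.INCLUDE);
    Filter and = new FilterList(FilterList.Operator.MUST_PASS_ALL, excludes, neverAsked);
    and.filterKeyValue(cell);
    // Per the javadoc, the AND list stops at the excluding filter, so 0 is expected here.
    System.out.println("AND consulted the second filter " + neverAsked.calls + " time(s)");

    CountingFilter first = new CountingFilter(ReturnCode.INCLUDE);
    CountingFilter second = new CountingFilter(ReturnCode.NEXT_ROW);
    Filter or = new FilterList(FilterList.Operator.MUST_PASS_ONE, first, second);
    or.filterKeyValue(cell);
    // Per the javadoc, the OR list asks every filter, so 1 and 1 are expected here.
    System.out.println("OR consulted both filters: " + first.calls + " and " + second.calls);
  }
}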

[07/10] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - addendum

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - 
addendum

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5716e8d9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5716e8d9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5716e8d9

Branch: refs/heads/master
Commit: 5716e8d9ed5d3cc86ee6e03c57c1ce46cf18fa82
Parents: 4591098
Author: huzheng 
Authored: Wed Jun 7 14:49:29 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/FilterList.java | 12 ++--
 1 file changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5716e8d9/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3493082..83db1f2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -145,7 +146,7 @@ final public class FilterList extends FilterBase {
 
   public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevCellList == null) {
+  if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
   }
   if (this.prevCellList == null) {
@@ -407,7 +408,14 @@ final public class FilterList extends FilterBase {
 ReturnCode localRC = filter.filterKeyValue(c);
 // Update previous cell and return code we encountered.
 prevFilterRCList.set(i, localRC);
-prevCellList.set(i, c);
+if (c == null || localRC == ReturnCode.INCLUDE || localRC == 
ReturnCode.SKIP) {
+  // If previous return code is INCLUDE or SKIP, we should always pass 
the next cell to the
+  // corresponding sub-filter(need not test 
shouldPassCurrentCellToFilter() method), So we
+  // need not save current cell to prevCellList for saving heap memory.
+  prevCellList.set(i, null);
+} else {
+  prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
+}
 
 if (localRC != ReturnCode.SEEK_NEXT_USING_HINT) {
   seenNonHintReturnCode = true;
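
The hunk above trades a copy for heap savings: only a key-only clone of the previous cell is kept, and only when a later comparison can actually need it. A tiny standalone sketch of what such a clone preserves (the cell contents are made up):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyOnlyCopySketch {
  public static void main(String[] args) {
    Cell full = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q1"), Bytes.toBytes("a potentially large value"));
    // Same row/family/qualifier coordinates, but no value bytes are retained.
    Cell keyOnly = KeyValueUtil.toNewKeyCell(full);
    System.out.println(CellUtil.matchingRowColumn(full, keyOnly)); // coordinates still match
    System.out.println(keyOnly.getValueLength());                  // value dropped
  }
}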



[09/10] hbase git commit: HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not work

2017-10-25 Thread zhangduo
HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not 
work

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fcaf71d2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fcaf71d2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fcaf71d2

Branch: refs/heads/master
Commit: fcaf71d20687e350d9357e1a303da6caf4c6ac08
Parents: 8a5114b
Author: huzheng 
Authored: Tue Oct 17 19:25:23 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../org/apache/hadoop/hbase/filter/Filter.java  | 10 +---
 .../hadoop/hbase/filter/FilterListWithOR.java   | 10 ++--
 .../hadoop/hbase/filter/TestFilterList.java | 26 
 .../hbase/filter/TestFilterListOnMini.java  |  7 +++---
 4 files changed, 44 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fcaf71d2/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 70c68b6..a92ea0b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -172,8 +172,12 @@ public abstract class Filter {
  */
 NEXT_COL,
 /**
- * Done with columns, skip to next row. Note that filterRow() will
- * still be called.
+ * Seek to next row in current family. It may still pass a cell whose 
family is different but
+ * row is the same as previous cell to {@link #filterKeyValue(Cell)} , 
even if we get a NEXT_ROW
+ * returned for previous cell. For more details see HBASE-18368. 
+ * Once reset() method was invoked, then we switch to the next row for all 
family, and you can
+ * catch the event by invoking CellUtils.matchingRows(previousCell, 
currentCell). 
+ * Note that filterRow() will still be called. 
  */
 NEXT_ROW,
 /**
@@ -181,7 +185,7 @@ public abstract class Filter {
  */
 SEEK_NEXT_USING_HINT,
 /**
- * Include KeyValue and done with row, seek to next.
+ * Include KeyValue and done with row, seek to next. See NEXT_ROW.
  */
 INCLUDE_AND_SEEK_NEXT_ROW,
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/fcaf71d2/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
index bac9023..31e2a55 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
@@ -74,7 +74,12 @@ public class FilterListWithOR extends FilterListBase {
* as the previous cell even if filter-A has NEXT_COL returned for the 
previous cell. So we should
* save the previous cell and the return code list when checking previous 
cell for every filter in
* filter list, and verify if currentCell fit the previous return code, if 
fit then pass the
-   * currentCell to the corresponding filter. (HBASE-17678)
+   * currentCell to the corresponding filter. (HBASE-17678) 
+   * Note that: In StoreScanner level, NEXT_ROW will skip to the next row in 
current family, and in
+   * RegionScanner level, NEXT_ROW will skip to the next row in current family 
and switch to the
+   * next family for RegionScanner, INCLUDE_AND_NEXT_ROW is the same. so we 
should pass current cell
+   * to the filter, if row mismatch or row match but column family mismatch. 
(HBASE-18368)
+   * @see org.apache.hadoop.hbase.filter.Filter.ReturnCode
*/
   private boolean shouldPassCurrentCellToFilter(Cell prevCell, Cell 
currentCell, int filterIdx)
   throws IOException {
@@ -94,7 +99,8 @@ public class FilterListWithOR extends FilterListBase {
   return !CellUtil.matchingRowColumn(prevCell, currentCell);
 case NEXT_ROW:
 case INCLUDE_AND_SEEK_NEXT_ROW:
-  return !CellUtil.matchingRows(prevCell, currentCell);
+  return !CellUtil.matchingRows(prevCell, currentCell)
+  || !CellUtil.matchingFamily(prevCell, currentCell);
 default:
   throw new IllegalStateException("Received code is not valid.");
 }
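
In client terms, the case this patch fixes looks roughly like the sketch below (the connection setup, table name and family names are assumptions, not part of the patch): a MUST_PASS_ONE list of two FamilyFilters is expected to return cells from both families of a matching row, instead of silently dropping the second family.

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class OrFamilyFilterSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("testTable"))) {
      Filter cf1 = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("cf1")));
      Filter cf2 = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("cf2")));
      Scan scan = new Scan();
      scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ONE, Arrays.asList(cf1, cf2)));
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
          // With the family check added above, cells from cf1 and cf2 both show up.
          System.out.println(r);
        }
      }
    }
  }
}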

http://git-wip-us.apache.org/repos/asf/hbase/blob/fcaf71d2/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java/org/

[06/10] hbase git commit: HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

2017-10-25 Thread zhangduo
HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a811fb41
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a811fb41
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a811fb41

Branch: refs/heads/master
Commit: a811fb410df2685253d49bcca9b4200beb279712
Parents: 1061225
Author: huzheng 
Authored: Thu Jun 8 15:58:42 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 541 ---
 .../hadoop/hbase/filter/TestFilterList.java | 148 -
 2 files changed, 471 insertions(+), 218 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a811fb41/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3ff978d..3147ab0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -90,62 +90,53 @@ final public class FilterList extends FilterBase {
   private Cell transformedCell = null;
 
   /**
-   * Constructor that takes a set of {@link Filter}s. The default operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List rowFilters) {
+reversed = checkAndGetReversed(rowFilters, reversed);
+this.filters = new ArrayList<>(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s. The default operator 
MUST_PASS_ALL is assumed.
* All filters are cloned to internal list.
* @param rowFilters list of filters
*/
   public FilterList(final List rowFilters) {
-reversed = getReversed(rowFilters, reversed);
-this.filters = new ArrayList<>(rowFilters);
-initPrevListForMustPassOne(rowFilters.size());
+this(Operator.MUST_PASS_ALL, rowFilters);
   }
 
   /**
-   * Constructor that takes a var arg number of {@link Filter}s. The fefault 
operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a var arg number of {@link Filter}s. The default 
operator MUST_PASS_ALL
+   * is assumed.
* @param rowFilters
*/
   public FilterList(final Filter... rowFilters) {
-this(Arrays.asList(rowFilters));
+this(Operator.MUST_PASS_ALL, Arrays.asList(rowFilters));
   }
 
   /**
* Constructor that takes an operator.
-   *
* @param operator Operator to process filter set with.
*/
   public FilterList(final Operator operator) {
-this.operator = operator;
-this.filters = new ArrayList<>();
-initPrevListForMustPassOne(filters.size());
-  }
-
-  /**
-   * Constructor that takes a set of {@link Filter}s and an operator.
-   *
-   * @param operator Operator to process filter set with.
-   * @param rowFilters Set of row filters.
-   */
-  public FilterList(final Operator operator, final List rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
+this(operator, new ArrayList<>());
   }
 
   /**
* Constructor that takes a var arg number of {@link Filter}s and an 
operator.
-   *
* @param operator Operator to process filter set with.
* @param rowFilters Filters to use
*/
   public FilterList(final Operator operator, final Filter... rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.length);
+this(operator, Arrays.asList(rowFilters));
   }
 
-  public void initPrevListForMustPassOne(int size) {
+  private void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
   if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
@@ -156,10 +147,8 @@ final public class FilterList extends FilterBase {
 }
   }
 
-
   /**
* Get the operator.
-   *
* @return operator
*/
   public Operator getOperator() {
@@ -168,7 +157,6 @@ final public class FilterList extends FilterBase {
 
   /**
* Get the filters.
-   *
* @return filters
*/
   public List getFilters() {
@@ -183,33 +171,22 @@ final public class FilterList extends FilterBase {
 return filters.isEmpty();
   }
 
-  private static boolean getReversed(List rowFilters, boolean 
defaultValue)

[03/10] hbase git commit: HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT

2017-10-25 Thread zhangduo
HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL 
FilterList return SEEK_NEXT_USING_HINT


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/10612254
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/10612254
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/10612254

Branch: refs/heads/master
Commit: 10612254a163d39e0ad70af683fe1890125c41ed
Parents: 5716e8d
Author: tedyu 
Authored: Thu Sep 7 04:07:09 2017 -0700
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../main/java/org/apache/hadoop/hbase/filter/FilterList.java| 5 +++--
 .../java/org/apache/hadoop/hbase/filter/TestFilterList.java | 4 ++--
 2 files changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/10612254/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 83db1f2..3ff978d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -28,12 +28,13 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters

http://git-wip-us.apache.org/repos/asf/hbase/blob/10612254/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 46d44de..e414729 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -502,8 +502,8 @@ public class TestFilterList {
 // Should take the min if given two hints
 FilterList filterList = new FilterList(Operator.MUST_PASS_ONE,
 Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } ));
-assertEquals(0,
-  CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), 
minKeyValue));
+assertEquals(0, 
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null),
+  minKeyValue));
 
 // Should have no hint if any filter has no hint
 filterList = new FilterList(Operator.MUST_PASS_ONE,
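
The stub filters used in the test above (filterMinHint, filterMaxHint) are not shown in this hunk; a hypothetical equivalent could look like the class below. Once every filter in a MUST_PASS_ALL list answers SEEK_NEXT_USING_HINT, the list may jump to the largest of their hints, which is the point of HBASE-15410, while a MUST_PASS_ONE list has to stay with the smallest, as the assertion above checks.

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical stub: never includes anything, always asks the scanner to seek
// to one fixed row.
class FixedHintFilter extends FilterBase {
  private final Cell hint;

  FixedHintFilter(String row) {
    this.hint = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("cf"), Bytes.toBytes("q"));
  }

  @Override
  public ReturnCode filterKeyValue(Cell c) throws IOException {
    return ReturnCode.SEEK_NEXT_USING_HINT;
  }

  @Override
  public Cell getNextCellHint(Cell currentCell) throws IOException {
    return hint;
  }
}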



[05/10] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45910981
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45910981
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45910981

Branch: refs/heads/master
Commit: 45910981b78ea2d238b9bcd2caef1270ca0261b4
Parents: 4e1c720
Author: huzheng 
Authored: Sat May 27 16:58:00 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:36:19 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  74 +-
 .../hadoop/hbase/filter/TestFilterList.java | 136 +--
 2 files changed, 200 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/45910981/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 2f11472..3493082 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -67,6 +67,14 @@ final public class FilterList extends FilterBase {
   private final List filters;
   private Collection seekHintFilters = new ArrayList();
 
+  /**
+   * Save previous return code and previous cell for every filter in filter 
list. For MUST_PASS_ONE,
+   * we use the previous return code to decide whether we should pass current 
cell encountered to
+   * the filter. For MUST_PASS_ALL, the two list are meaningless.
+   */
+  private List prevFilterRCList = null;
+  private List prevCellList = null;
+
   /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
   private Cell referenceCell = null;
 
@@ -88,6 +96,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final List rowFilters) {
 reversed = getReversed(rowFilters, reversed);
 this.filters = new ArrayList<>(rowFilters);
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -107,6 +116,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
 this.operator = operator;
 this.filters = new ArrayList<>();
+initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -118,6 +128,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final List rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -129,8 +140,21 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.length);
+  }
+
+  public void initPrevListForMustPassOne(int size) {
+if (operator == Operator.MUST_PASS_ONE) {
+  if (this.prevCellList == null) {
+prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+  if (this.prevCellList == null) {
+prevCellList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+}
   }
 
+
   /**
* Get the operator.
*
@@ -185,6 +209,10 @@ final public class FilterList extends FilterBase {
   public void addFilter(List filters) {
 checkReversed(filters, isReversed());
 this.filters.addAll(filters);
+if (operator == Operator.MUST_PASS_ONE) {
+  this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
+  this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
+}
   }
 
   /**
@@ -201,6 +229,10 @@ final public class FilterList extends FilterBase {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   filters.get(i).reset();
+  if (operator == Operator.MUST_PASS_ONE) {
+prevFilterRCList.set(i, null);
+prevCellList.set(i, null);
+  }
 }
 seekHintFilters.clear();
   }
@@ -283,6 +315,41 @@ final public class FilterList extends FilterBase {
 return this.transformedCell;
   }
 
+  /**
+   * For MUST_PASS_ONE, we cannot make sure that when filter-A in filter list 
return NEXT_COL then
+   * the next cell passing to filterList will be the first cell in next 
column, because if filter-B
+   * in filter list return SKIP, then the filter list will return SKIP. In 
this case, we should pass
+   * the cell following the previous cell, and it's possible that the next 
cell has the same column
+   * as the previous cell even if filter-A has NEXT_COL returned for the 
previous cell. So we should
+   * save th
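
A toy reconstruction of the NEXT_COL/SKIP interaction described in the javadoc above (the stub filters and their fixed answers are assumptions made only for illustration; this is not HBase test code):

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class MustPassOnePrevCellDemo {
  // Always "done with this column": the next wanted cell is in another column.
  static class NextColStub extends FilterBase {
    @Override
    public ReturnCode filterKeyValue(Cell c) throws IOException {
      return ReturnCode.NEXT_COL;
    }
  }

  // Always "skip this version only".
  static class SkipStub extends FilterBase {
    @Override
    public ReturnCode filterKeyValue(Cell c) throws IOException {
      return ReturnCode.SKIP;
    }
  }

  public static void main(String[] args) throws IOException {
    Filter list = new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new NextColStub(), new SkipStub());
    Cell newerVersion = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q1"), 2L, Bytes.toBytes("v2"));
    Cell olderVersion = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q1"), 1L, Bytes.toBytes("v1"));
    // Per the javadoc above, NEXT_COL merged with SKIP gives SKIP, so the scanner
    // may hand the list another cell from the very same column.
    System.out.println(list.filterKeyValue(newerVersion));
    // Thanks to the saved previous return codes, NextColStub is not consulted
    // again for that column; only SkipStub sees the older version.
    System.out.println(list.filterKeyValue(olderVersion));
  }
}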

[07/10] hbase git commit: HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()

2017-10-25 Thread zhangduo
HBASE-18904 Missing break in NEXT_ROW case of 
FilterList#mergeReturnCodeForOrOperator()

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1a5b3a33
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1a5b3a33
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1a5b3a33

Branch: refs/heads/branch-2
Commit: 1a5b3a336337cb5b4d2db83d52838dc3b9b75eb6
Parents: 15eae6a
Author: Biju Nair 
Authored: Fri Sep 29 16:55:54 2017 -0400
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1a5b3a33/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3147ab0..b518645 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,6 +566,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
 return ReturnCode.NEXT_ROW;
   }
+  break;
 case SEEK_NEXT_USING_HINT:
   if (isInReturnCodes(rc, ReturnCode.INCLUDE, 
ReturnCode.INCLUDE_AND_NEXT_COL,
 ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -577,6 +578,7 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
 return ReturnCode.SEEK_NEXT_USING_HINT;
   }
+  break;
 }
 throw new IllegalStateException(
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);



[10/10] hbase git commit: HBASE-19057 Fix other code review comments about FilterList improvement

2017-10-25 Thread zhangduo
HBASE-19057 Fix other code review comments about FilterList improvement


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c2dbef14
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c2dbef14
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c2dbef14

Branch: refs/heads/branch-2
Commit: c2dbef1465de18767948a092f26d836e45d91ded
Parents: 7a2da02
Author: huzheng 
Authored: Tue Oct 24 15:30:55 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:41:25 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 24 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 16 --
 .../hadoop/hbase/filter/FilterListWithAND.java  | 12 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 31 +++-
 .../hadoop/hbase/filter/TestFilterList.java |  8 +++--
 5 files changed, 59 insertions(+), 32 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c2dbef14/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index e87f1b3..d4242ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,17 +21,12 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.yetus.audience.InterfaceAudience;
 
 import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -170,8 +165,23 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
-  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) 
throws IOException {
-return this.filterListBase.internalFilterKeyValue(c, 
currentTransformedCell);
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}. Compared to the
+   * {@link #filterKeyValue(Cell)} method, this method accepts an additional 
parameter named
+   * transformedCell. This parameter indicates the initial value of 
transformed cell before this
+   * filter operation. 
+   * For FilterList, we can consider a filter list as a node in a tree. 
sub-filters of the filter
+   * list are children of the relative node. The logic of transforming cell of 
a filter list, well,
+   * we can consider it as the process of post-order tree traverse. For a node 
, Before we traverse
+   * the current child, we should set the traverse result (transformed cell) 
of previous node(s) as
+   * the initial value. so the additional currentTransformedCell parameter is 
needed (HBASE-18879).
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOException
+   */
+  ReturnCode internalFilterKeyValue(Cell c, Cell transformedCell) throws 
IOException {
+return this.filterListBase.internalFilterKeyValue(c, transformedCell);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/c2dbef14/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 60b0dc1..f92d2e7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,8 +107,20 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
-  abstract ReturnCode internalFilterKeyValue(Cell c, Cell 
currentTransformedCell)
-  throws IOException;
+  /**
+   * Internal implementation of {@link #filterKeyValue(Cell)}
+   * @param c The cell in question.
+   * @param transformedCell The transformed cell of previous filter(s)
+   * @return ReturnCode of this filter operation.
+   * @throws IOException
+   * @see 
org.apache.hadoop.hbase.filter.FilterList#internalFilterKeyVa

[03/10] hbase git commit: HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT

2017-10-25 Thread zhangduo
HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL 
FilterList return SEEK_NEXT_USING_HINT


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/47d8549c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/47d8549c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/47d8549c

Branch: refs/heads/branch-2
Commit: 47d8549cec0e869fec5e2a8dea20aadcae2759a7
Parents: 50c9a41
Author: tedyu 
Authored: Thu Sep 7 04:07:09 2017 -0700
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../main/java/org/apache/hadoop/hbase/filter/FilterList.java| 5 +++--
 .../java/org/apache/hadoop/hbase/filter/TestFilterList.java | 4 ++--
 2 files changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/47d8549c/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 83db1f2..3ff978d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -28,12 +28,13 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters

http://git-wip-us.apache.org/repos/asf/hbase/blob/47d8549c/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 46d44de..e414729 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -502,8 +502,8 @@ public class TestFilterList {
 // Should take the min if given two hints
 FilterList filterList = new FilterList(Operator.MUST_PASS_ONE,
 Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } ));
-assertEquals(0,
-  CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), 
minKeyValue));
+assertEquals(0, 
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null),
+  minKeyValue));
 
 // Should have no hint if any filter has no hint
 filterList = new FilterList(Operator.MUST_PASS_ONE,



[02/10] hbase git commit: HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

2017-10-25 Thread zhangduo
HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/15eae6ac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/15eae6ac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/15eae6ac

Branch: refs/heads/branch-2
Commit: 15eae6ac3592979804a798a035af3d1efa3b76c0
Parents: 47d8549
Author: huzheng 
Authored: Thu Jun 8 15:58:42 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 541 ---
 .../hadoop/hbase/filter/TestFilterList.java | 148 -
 2 files changed, 471 insertions(+), 218 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/15eae6ac/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3ff978d..3147ab0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -90,62 +90,53 @@ final public class FilterList extends FilterBase {
   private Cell transformedCell = null;
 
   /**
-   * Constructor that takes a set of {@link Filter}s. The default operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List rowFilters) {
+reversed = checkAndGetReversed(rowFilters, reversed);
+this.filters = new ArrayList<>(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s. The default operator 
MUST_PASS_ALL is assumed.
* All filters are cloned to internal list.
* @param rowFilters list of filters
*/
   public FilterList(final List rowFilters) {
-reversed = getReversed(rowFilters, reversed);
-this.filters = new ArrayList<>(rowFilters);
-initPrevListForMustPassOne(rowFilters.size());
+this(Operator.MUST_PASS_ALL, rowFilters);
   }
 
   /**
-   * Constructor that takes a var arg number of {@link Filter}s. The fefault 
operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a var arg number of {@link Filter}s. The default 
operator MUST_PASS_ALL
+   * is assumed.
* @param rowFilters
*/
   public FilterList(final Filter... rowFilters) {
-this(Arrays.asList(rowFilters));
+this(Operator.MUST_PASS_ALL, Arrays.asList(rowFilters));
   }
 
   /**
* Constructor that takes an operator.
-   *
* @param operator Operator to process filter set with.
*/
   public FilterList(final Operator operator) {
-this.operator = operator;
-this.filters = new ArrayList<>();
-initPrevListForMustPassOne(filters.size());
-  }
-
-  /**
-   * Constructor that takes a set of {@link Filter}s and an operator.
-   *
-   * @param operator Operator to process filter set with.
-   * @param rowFilters Set of row filters.
-   */
-  public FilterList(final Operator operator, final List rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
+this(operator, new ArrayList<>());
   }
 
   /**
* Constructor that takes a var arg number of {@link Filter}s and an 
operator.
-   *
* @param operator Operator to process filter set with.
* @param rowFilters Filters to use
*/
   public FilterList(final Operator operator, final Filter... rowFilters) {
-this(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.length);
+this(operator, Arrays.asList(rowFilters));
   }
 
-  public void initPrevListForMustPassOne(int size) {
+  private void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
   if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
@@ -156,10 +147,8 @@ final public class FilterList extends FilterBase {
 }
   }
 
-
   /**
* Get the operator.
-   *
* @return operator
*/
   public Operator getOperator() {
@@ -168,7 +157,6 @@ final public class FilterList extends FilterBase {
 
   /**
* Get the filters.
-   *
* @return filters
*/
   public List getFilters() {
@@ -183,33 +171,22 @@ final public class FilterList extends FilterBase {
 return filters.isEmpty();
   }
 
-  private static boolean getReversed(List rowFilters, boolean 
defaultValu

[06/10] hbase git commit: HBASE-18411 Dividing FilterList into two separate sub-classes: FilterListWithOR, FilterListWithAND

2017-10-25 Thread zhangduo
HBASE-18411 Dividing FilterList into two separate sub-classes: FilterListWithOR, FilterListWithAND

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/aa23cca5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/aa23cca5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/aa23cca5

Branch: refs/heads/branch-2
Commit: aa23cca58ae07f7e811e005d510be7619c658043
Parents: 1a5b3a3
Author: huzheng 
Authored: Tue Oct 10 20:01:48 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 661 ++-
 .../hadoop/hbase/filter/FilterListBase.java | 159 +
 .../hadoop/hbase/filter/FilterListWithAND.java  | 273 
 .../hadoop/hbase/filter/FilterListWithOR.java   | 383 +++
 .../hadoop/hbase/filter/TestFilterList.java |  89 +++
 5 files changed, 962 insertions(+), 603 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/aa23cca5/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index b518645..97392d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -37,86 +38,60 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
 
 /**
- * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator {@link 
Operator#MUST_PASS_ALL}
- * (AND) or {@link Operator#MUST_PASS_ONE} (OR).
- * Since you can use Filter Lists as children of Filter Lists, you can create a
- * hierarchy of filters to be evaluated.
- *
- * 
- * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does
- * not include the KeyValue.
- *
- * 
- * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated.
- *
- * 
+ * Implementation of {@link Filter} that represents an ordered List of Filters 
which will be
+ * evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} 
(AND) or
+ * {@link Operator#MUST_PASS_ONE} (OR). Since you can use Filter 
Lists as children of
+ * Filter Lists, you can create a hierarchy of filters to be evaluated. 
+ * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon 
as one filter does not
+ * include the KeyValue. 
+ * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always 
evaluated. 
  * Defaults to {@link Operator#MUST_PASS_ALL}.
  */
 @InterfaceAudience.Public
 final public class FilterList extends FilterBase {
+
   /** set operator */
   @InterfaceAudience.Public
-  public static enum Operator {
+  public enum Operator {
 /** !AND */
 MUST_PASS_ALL,
 /** !OR */
 MUST_PASS_ONE
   }
 
-  private static final int MAX_LOG_FILTERS = 5;
-  private Operator operator = Operator.MUST_PASS_ALL;
-  private final List filters;
-  private Collection seekHintFilters = new ArrayList();
-
-  /**
-   * Save previous return code and previous cell for every filter in filter 
list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current 
cell encountered to
-   * the filter. For MUST_PASS_ALL, the two list are meaningless.
-   */
-  private List prevFilterRCList = null;
-  private List prevCellList = null;
-
-  /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
-  private Cell referenceCell = null;
-
-  /**
-   * When filtering a given Cell in {@link #filterKeyValue(Cell)},
-   * this stores the transformed Cell to be returned by {@link 
#transformCell(Cell)}.
-   *
-   * Individual filters transformation are applied only when the filter 
includes the Cell.
-   * Transformations are composed in the order specified by {@link #filters}.
-   */
-  private Cell transformedCell = null;
+  private Operator operator;
+  private FilterListBase filterListBase;
 
   /**
* Constructor that takes a set of {@link Filter}s and an operator.
* @param operator Operator to process filter set with.
-   * @para

[08/10] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - addendum

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - 
addendum

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/50c9a412
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/50c9a412
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/50c9a412

Branch: refs/heads/branch-2
Commit: 50c9a412fe9291104a36e5f6f49177c0cd823f87
Parents: 7237ecc
Author: huzheng 
Authored: Wed Jun 7 14:49:29 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/FilterList.java | 12 ++--
 1 file changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/50c9a412/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3493082..83db1f2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -145,7 +146,7 @@ final public class FilterList extends FilterBase {
 
   public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevCellList == null) {
+  if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
   }
   if (this.prevCellList == null) {
@@ -407,7 +408,14 @@ final public class FilterList extends FilterBase {
 ReturnCode localRC = filter.filterKeyValue(c);
 // Update previous cell and return code we encountered.
 prevFilterRCList.set(i, localRC);
-prevCellList.set(i, c);
+if (c == null || localRC == ReturnCode.INCLUDE || localRC == ReturnCode.SKIP) {
+  // If previous return code is INCLUDE or SKIP, we should always pass the next cell to the
+  // corresponding sub-filter(need not test shouldPassCurrentCellToFilter() method), So we
+  // need not save current cell to prevCellList for saving heap memory.
+  prevCellList.set(i, null);
+} else {
+  prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
+}
 
 if (localRC != ReturnCode.SEEK_NEXT_USING_HINT) {
   seenNonHintReturnCode = true;
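The branch above is a memory optimisation rather than a behaviour change. An annotated restatement
of the decision it encodes (illustrative sketch, not the committed hunk; KeyValueUtil.toNewKeyCell
is the helper shown in the diff):

    if (localRC == ReturnCode.INCLUDE || localRC == ReturnCode.SKIP) {
      // The next cell will be offered to this sub-filter unconditionally, so
      // nothing about the current cell needs to be remembered.
      prevCellList.set(i, null);
    } else {
      // Remember only the key portion (row/family/qualifier/timestamp/type);
      // holding the scanned Cell itself could keep its whole backing buffer
      // alive, which is the heap concern the comment mentions.
      prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
    }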



[05/10] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

2017-10-25 Thread zhangduo
HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7237ecc3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7237ecc3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7237ecc3

Branch: refs/heads/branch-2
Commit: 7237ecc3b334c69f71a3884042b196dd8c3ff0d9
Parents: e6f61b9
Author: huzheng 
Authored: Sat May 27 16:58:00 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  74 +-
 .../hadoop/hbase/filter/TestFilterList.java | 136 +--
 2 files changed, 200 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7237ecc3/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 2f11472..3493082 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -67,6 +67,14 @@ final public class FilterList extends FilterBase {
   private final List filters;
   private Collection seekHintFilters = new ArrayList();
 
+  /**
+   * Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
+   * we use the previous return code to decide whether we should pass current cell encountered to
+   * the filter. For MUST_PASS_ALL, the two list are meaningless.
+   */
+  private List<ReturnCode> prevFilterRCList = null;
+  private List<Cell> prevCellList = null;
+
   /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */
   private Cell referenceCell = null;
 
@@ -88,6 +96,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final List rowFilters) {
 reversed = getReversed(rowFilters, reversed);
 this.filters = new ArrayList<>(rowFilters);
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -107,6 +116,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
 this.operator = operator;
 this.filters = new ArrayList<>();
+initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -118,6 +128,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final List rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -129,8 +140,21 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.length);
+  }
+
+  public void initPrevListForMustPassOne(int size) {
+if (operator == Operator.MUST_PASS_ONE) {
+  if (this.prevCellList == null) {
+prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+  if (this.prevCellList == null) {
+prevCellList = new ArrayList<>(Collections.nCopies(size, null));
+  }
+}
   }
 
+
   /**
* Get the operator.
*
@@ -185,6 +209,10 @@ final public class FilterList extends FilterBase {
   public void addFilter(List filters) {
 checkReversed(filters, isReversed());
 this.filters.addAll(filters);
+if (operator == Operator.MUST_PASS_ONE) {
+  this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
+  this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
+}
   }
 
   /**
@@ -201,6 +229,10 @@ final public class FilterList extends FilterBase {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   filters.get(i).reset();
+  if (operator == Operator.MUST_PASS_ONE) {
+prevFilterRCList.set(i, null);
+prevCellList.set(i, null);
+  }
 }
 seekHintFilters.clear();
   }
@@ -283,6 +315,41 @@ final public class FilterList extends FilterBase {
 return this.transformedCell;
   }
 
+  /**
+   * For MUST_PASS_ONE, we cannot make sure that when filter-A in filter list return NEXT_COL then
+   * the next cell passing to filterList will be the first cell in next column, because if filter-B
+   * in filter list return SKIP, then the filter list will return SKIP. In this case, we should pass
+   * the cell following the previous cell, and it's possible that the next cell has the same column
+   * as the previous cell even if filter-A has NEXT_COL returned for the previous cell. So we should
+   * save 
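The (truncated) javadoc above motivates checking each new cell against every sub-filter's previous
verdict. A condensed sketch of that check (illustrative only; it assumes Filter.ReturnCode and
org.apache.hadoop.hbase.CellUtil, the committed version lives in this FilterList and, after the
later refactor, in FilterListWithOR; the HBASE-18368 follow-up below adds a family comparison to
the NEXT_ROW case):

    boolean shouldPassCurrentCellToFilter(Cell prevCell, ReturnCode prevCode, Cell currentCell) {
      if (prevCell == null || prevCode == null) {
        return true;                       // nothing recorded yet: always pass
      }
      switch (prevCode) {
        case INCLUDE:
        case SKIP:
          return true;                     // the sub-filter wants to see every cell
        case SEEK_NEXT_USING_HINT:
          return true;                     // simplified: the real code honours the seek hint
        case NEXT_COL:
        case INCLUDE_AND_NEXT_COL:
          return !CellUtil.matchingRowColumn(prevCell, currentCell);   // new column reached?
        case NEXT_ROW:
        case INCLUDE_AND_SEEK_NEXT_ROW:
          return !CellUtil.matchingRows(prevCell, currentCell);        // new row reached?
        default:
          throw new IllegalStateException("Received code is not valid.");
      }
    }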

[01/10] hbase git commit: HBASE-18879 HBase FilterList cause KeyOnlyFilter not work

2017-10-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 962d7e9bf -> c2dbef146


HBASE-18879 HBase FilterList cause KeyOnlyFilter not work


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f6dd5e8b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f6dd5e8b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f6dd5e8b

Branch: refs/heads/branch-2
Commit: f6dd5e8b640578f39a2c9c35a866c36e80062bd7
Parents: aa23cca
Author: huzheng 
Authored: Wed Oct 11 21:17:03 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  6 +++
 .../hadoop/hbase/filter/FilterListBase.java |  3 ++
 .../hadoop/hbase/filter/FilterListWithAND.java  | 22 +
 .../hadoop/hbase/filter/FilterListWithOR.java   | 22 +
 .../hadoop/hbase/filter/TestFilterList.java | 48 
 5 files changed, 85 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f6dd5e8b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 97392d1..e87f1b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -72,6 +72,8 @@ final public class FilterList extends FilterBase {
   filterListBase = new FilterListWithAND(filters);
 } else if (operator == Operator.MUST_PASS_ONE) {
   filterListBase = new FilterListWithOR(filters);
+} else {
+  throw new IllegalArgumentException("Invalid operator: " + operator);
 }
 this.operator = operator;
   }
@@ -168,6 +170,10 @@ final public class FilterList extends FilterBase {
 return filterListBase.transformCell(c);
   }
 
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) throws IOException {
+return this.filterListBase.internalFilterKeyValue(c, currentTransformedCell);
+  }
+
   @Override
   public ReturnCode filterKeyValue(Cell c) throws IOException {
 return filterListBase.filterKeyValue(c);

http://git-wip-us.apache.org/repos/asf/hbase/blob/f6dd5e8b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index 7fa0245..60b0dc1 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -107,6 +107,9 @@ public abstract class FilterListBase extends FilterBase {
 return cell;
   }
 
+  abstract ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell)
+  throws IOException;
+
   /**
* Filters that never filter by modifying the returned List of Cells can 
inherit this
* implementation that does nothing. {@inheritDoc}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f6dd5e8b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
index fa979c0..4909dfd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
@@ -147,16 +147,26 @@ public class FilterListWithAND extends FilterListBase {
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);
   }
 
-  private ReturnCode filterKeyValueWithMustPassAll(Cell c) throws IOException {
+  @Override
+  ReturnCode internalFilterKeyValue(Cell c, Cell currentTransformedCell) throws IOException {
+if (isEmpty()) {
+  return ReturnCode.INCLUDE;
+}
 ReturnCode rc = ReturnCode.INCLUDE;
-Cell transformed = c;
+Cell transformed = currentTransformedCell;
+this.referenceCell = c;
 this.seekHintFilter.clear();
 for (int i = 0, n = filters.size(); i < n; i++) {
   Filter filter = filters.get(i);
   if (filter.filterAllRemaining()) {
 return ReturnCode.NEXT_ROW;
   }
-  ReturnCode localRC = filter.filterKeyValue(c);
+  ReturnCode localRC;
+  if (filter instanceof FilterList) {
+localRC = ((FilterList) filter).internalFilterKeyV
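The delegation above threads the partially transformed cell through nested FilterLists, so
transformations such as KeyOnlyFilter's are no longer lost, which is the symptom in the subject
line. The intended end-user behaviour, as a hedged sketch (no table- or column-specific
assumptions):

    // After this fix, wrapping KeyOnlyFilter in a FilterList behaves the same as
    // setting it on the Scan directly: returned Cells keep their keys but carry
    // empty values (a common row-counting idiom together with FirstKeyOnlyFilter).
    Scan scan = new Scan();
    scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new FirstKeyOnlyFilter(), new KeyOnlyFilter()));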

[09/10] hbase git commit: HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not work

2017-10-25 Thread zhangduo
HBASE-18368 FilterList with multiple FamilyFilters concatenated by OR does not 
work

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7a2da02e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7a2da02e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7a2da02e

Branch: refs/heads/branch-2
Commit: 7a2da02e6dcd4f1a5c5d204baf0647bc17421cd0
Parents: f6dd5e8
Author: huzheng 
Authored: Tue Oct 17 19:25:23 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:41:25 2017 +0800

--
 .../org/apache/hadoop/hbase/filter/Filter.java  | 10 +---
 .../hadoop/hbase/filter/FilterListWithOR.java   | 10 ++--
 .../hadoop/hbase/filter/TestFilterList.java | 26 
 .../hbase/filter/TestFilterListOnMini.java  |  7 +++---
 4 files changed, 44 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7a2da02e/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 70c68b6..a92ea0b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -172,8 +172,12 @@ public abstract class Filter {
  */
 NEXT_COL,
 /**
- * Done with columns, skip to next row. Note that filterRow() will
- * still be called.
+ * Seek to next row in current family. It may still pass a cell whose family is different but
+ * row is the same as previous cell to {@link #filterKeyValue(Cell)} , even if we get a NEXT_ROW
+ * returned for previous cell. For more details see HBASE-18368. 
+ * Once reset() method was invoked, then we switch to the next row for all family, and you can
+ * catch the event by invoking CellUtils.matchingRows(previousCell, currentCell). 
+ * Note that filterRow() will still be called. 
  */
 NEXT_ROW,
 /**
@@ -181,7 +185,7 @@ public abstract class Filter {
  */
 SEEK_NEXT_USING_HINT,
 /**
- * Include KeyValue and done with row, seek to next.
+ * Include KeyValue and done with row, seek to next. See NEXT_ROW.
  */
 INCLUDE_AND_SEEK_NEXT_ROW,
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/7a2da02e/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
index bac9023..31e2a55 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
@@ -74,7 +74,12 @@ public class FilterListWithOR extends FilterListBase {
* as the previous cell even if filter-A has NEXT_COL returned for the 
previous cell. So we should
* save the previous cell and the return code list when checking previous 
cell for every filter in
* filter list, and verify if currentCell fit the previous return code, if 
fit then pass the
-   * currentCell to the corresponding filter. (HBASE-17678)
+   * currentCell to the corresponding filter. (HBASE-17678) 
+   * Note that: In StoreScanner level, NEXT_ROW will skip to the next row in 
current family, and in
+   * RegionScanner level, NEXT_ROW will skip to the next row in current family 
and switch to the
+   * next family for RegionScanner, INCLUDE_AND_NEXT_ROW is the same. so we 
should pass current cell
+   * to the filter, if row mismatch or row match but column family mismatch. 
(HBASE-18368)
+   * @see org.apache.hadoop.hbase.filter.Filter.ReturnCode
*/
   private boolean shouldPassCurrentCellToFilter(Cell prevCell, Cell 
currentCell, int filterIdx)
   throws IOException {
@@ -94,7 +99,8 @@ public class FilterListWithOR extends FilterListBase {
   return !CellUtil.matchingRowColumn(prevCell, currentCell);
 case NEXT_ROW:
 case INCLUDE_AND_SEEK_NEXT_ROW:
-  return !CellUtil.matchingRows(prevCell, currentCell);
+  return !CellUtil.matchingRows(prevCell, currentCell)
+  || !CellUtil.matchingFamily(prevCell, currentCell);
 default:
   throw new IllegalStateException("Received code is not valid.");
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/7a2da02e/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git 
a/hbase-server/src/test/java/or
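The NEXT_ROW clarification and the extra matchingFamily() check above target one concrete query
shape. A hedged sketch of it (family names "f1"/"f2" are invented; the FamilyFilter constructor
shown uses the long-standing CompareOp/BinaryComparator form):

    FilterList orList = new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("f1"))),
        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("f2"))));
    Scan scan = new Scan();
    scan.setFilter(orList);
    // Before this change, a NEXT_ROW remembered for one FamilyFilter could suppress the same
    // row's cells from the other family, because NEXT_ROW only means "next row within the
    // current family/store" at StoreScanner level, as the javadoc above explains.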

[04/10] hbase git commit: HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature branch.

2017-10-25 Thread zhangduo
HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature 
branch.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e6f61b99
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e6f61b99
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e6f61b99

Branch: refs/heads/branch-2
Commit: e6f61b997ab3eeef2f5215863d1725d15b71143b
Parents: 962d7e9
Author: Sean Busbey 
Authored: Mon Oct 9 15:24:00 2017 -0500
Committer: zhangduo 
Committed: Wed Oct 25 20:41:24 2017 +0800

--
 .../java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java   | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e6f61b99/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
index dd2399f..590b26e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -58,6 +58,7 @@ public class TestFilterListOnMini {
 TEST_UTIL.shutdownMiniCluster();
   }
 
+  @Ignore("HBASE-18410 Should not merge without this test running.")
   @Test
   public void testFiltersWithOR() throws Exception {
 TableName tn = TableName.valueOf(name.getMethodName());



[1/2] hbase git commit: HBASE-18905 Allow CPs to request flush on Region and know the completion of the requested flush

2017-10-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 705b3fa98 -> 0b799fdbf


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b799fdb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
index 739519f..aae04df 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
@@ -135,11 +135,11 @@ public class TestHeapMemoryManager {
 final ChoreService choreService = new ChoreService("TEST_SERVER_NAME");
 heapMemoryManager.start(choreService);
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_LOWER_MARK;
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 Thread.sleep(1500);
 // No changes should be made by tuner as we already have lot of empty space
@@ -178,10 +178,10 @@ public class TestHeapMemoryManager {
 // do some offheap flushes also. So there should be decrease in memstore 
but
 // not as that when we don't have offheap flushes
 memStoreFlusher.flushType = FlushType.ABOVE_OFFHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 waitForTune(memStoreFlusher, memStoreFlusher.memstoreSize);
 assertHeapSpaceDelta(-maxStepValue, oldMemstoreHeapSize, 
memStoreFlusher.memstoreSize);
@@ -226,10 +226,10 @@ public class TestHeapMemoryManager {
 // do some offheap flushes also. So there should be decrease in memstore 
but
 // not as that when we don't have offheap flushes
 memStoreFlusher.flushType = FlushType.ABOVE_OFFHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 waitForTune(memStoreFlusher, memStoreFlusher.memstoreSize);
 assertHeapSpaceDelta(-maxStepValue, oldMemstoreHeapSize, 
memStoreFlusher.memstoreSize);
@@ -242,10 +242,10 @@ public class TestHeapMemoryManager {
 // flushes are due to onheap overhead. This should once again call for 
increase in
 // memstore size but that increase should be to the safe size
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 waitForTune(memStoreFlusher, memStoreFlusher.memstoreSize);
 assertHeapSpaceDelta(maxStepValue, oldMemstoreHeapSize, 
memStoreFlusher.memstoreSize);
@@ -308,10 +308,10 @@ public class TestHeapMemoryManager {
 final ChoreService choreService = new ChoreService("TEST_SERVER_NAME");
 heapMemoryManager.start(choreService);
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_LOWER_MARK;
-memStoreFlusher.requestFlush(null, false);
-m

[2/2] hbase git commit: HBASE-18905 Allow CPs to request flush on Region and know the completion of the requested flush

2017-10-25 Thread zhangduo
HBASE-18905 Allow CPs to request flush on Region and know the completion of the 
requested flush


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b799fdb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b799fdb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b799fdb

Branch: refs/heads/master
Commit: 0b799fdbf027da6234d725b23ea92cdb77afcccd
Parents: 705b3fa
Author: zhangduo 
Authored: Wed Oct 25 11:00:44 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:45:47 2017 +0800

--
 .../example/ZooKeeperScanPolicyObserver.java|   3 +-
 .../hbase/coprocessor/RegionObserver.java   |  15 +-
 .../hbase/mob/DefaultMobStoreFlusher.java   |   6 +-
 .../hbase/regionserver/DefaultStoreFlusher.java |   5 +-
 .../regionserver/FlushLifeCycleTracker.java |  51 
 .../hbase/regionserver/FlushRequester.java  |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 118 +
 .../hadoop/hbase/regionserver/HStore.java   |  20 +-
 .../hadoop/hbase/regionserver/LogRoller.java|   2 +-
 .../hbase/regionserver/MemStoreFlusher.java |  67 +++---
 .../hbase/regionserver/RSRpcServices.java   |   3 +-
 .../hadoop/hbase/regionserver/Region.java   |   5 +
 .../regionserver/RegionCoprocessorHost.java |  23 +-
 .../hadoop/hbase/regionserver/StoreFlusher.java |   9 +-
 .../hbase/regionserver/StripeStoreFlusher.java  |   5 +-
 .../hbase/security/access/AccessController.java |   6 +-
 .../client/TestMobCloneSnapshotFromClient.java  |   4 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |   5 +-
 .../coprocessor/TestCoprocessorInterface.java   |   9 +-
 .../TestRegionObserverInterface.java|   4 +-
 .../TestRegionObserverScannerOpenHook.java  |   3 +-
 .../regionserver/NoOpScanPolicyObserver.java|   2 +-
 .../regionserver/TestFlushLifeCycleTracker.java | 240 +++
 .../regionserver/TestFlushRegionEntry.java  |  43 ++--
 .../hbase/regionserver/TestHMobStore.java   |   2 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  17 +-
 .../regionserver/TestHRegionReplayEvents.java   |   8 +-
 .../hadoop/hbase/regionserver/TestHStore.java   |  10 +-
 .../regionserver/TestHeapMemoryManager.java |  75 +++---
 .../regionserver/TestSplitWalDataLoss.java  |  11 +-
 .../regionserver/wal/AbstractTestWALReplay.java |  20 +-
 .../security/access/TestAccessController.java   |   4 +-
 .../access/TestWithDisabledAuthorization.java   |   4 +-
 .../hbase/util/TestCoprocessorScanPolicy.java   |   3 +-
 34 files changed, 589 insertions(+), 215 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b799fdb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
--
diff --git 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
index f849c86..d6d66bb 100644
--- 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
+++ 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
+import org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
@@ -188,7 +189,7 @@ public class ZooKeeperScanPolicyObserver implements 
RegionCoprocessor, RegionObs
 
   @Override
   public InternalScanner preFlush(ObserverContext c, Store store,
-  InternalScanner scanner) throws IOException {
+  InternalScanner scanner, FlushLifeCycleTracker tracker) throws IOException {
 return wrap(scanner);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b799fdb/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index 5c89149..2ca1495 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -42,6 +42
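For coprocessor authors the visible change is the extra FlushLifeCycleTracker parameter on the
flush hooks, as the ZooKeeperScanPolicyObserver hunk above shows. A hedged sketch of an adapted
observer (class name and body are invented; the exact generic bound on ObserverContext is assumed
from the pre-existing signature):

    public class FlushAwareObserver implements RegionCoprocessor, RegionObserver {
      @Override
      public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this);
      }

      @Override
      public InternalScanner preFlush(ObserverContext<RegionCoprocessorEnvironment> c,
          Store store, InternalScanner scanner, FlushLifeCycleTracker tracker) throws IOException {
        // The tracker lets whoever requested the flush learn when it actually runs or
        // completes; observers that do not care about it can simply ignore the argument.
        return scanner;
      }
    }

Internal callers with no interest in completion keep working by passing the no-op
FlushLifeCycleTracker.DUMMY, as the TestHeapMemoryManager changes above illustrate.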

[1/2] hbase git commit: HBASE-18905 Allow CPs to request flush on Region and know the completion of the requested flush

2017-10-25 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 c2dbef146 -> ca79a9156


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca79a915/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
index 739519f..aae04df 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
@@ -135,11 +135,11 @@ public class TestHeapMemoryManager {
 final ChoreService choreService = new ChoreService("TEST_SERVER_NAME");
 heapMemoryManager.start(choreService);
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_LOWER_MARK;
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 Thread.sleep(1500);
 // No changes should be made by tuner as we already have lot of empty space
@@ -178,10 +178,10 @@ public class TestHeapMemoryManager {
 // do some offheap flushes also. So there should be decrease in memstore 
but
 // not as that when we don't have offheap flushes
 memStoreFlusher.flushType = FlushType.ABOVE_OFFHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 waitForTune(memStoreFlusher, memStoreFlusher.memstoreSize);
 assertHeapSpaceDelta(-maxStepValue, oldMemstoreHeapSize, 
memStoreFlusher.memstoreSize);
@@ -226,10 +226,10 @@ public class TestHeapMemoryManager {
 // do some offheap flushes also. So there should be decrease in memstore 
but
 // not as that when we don't have offheap flushes
 memStoreFlusher.flushType = FlushType.ABOVE_OFFHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 waitForTune(memStoreFlusher, memStoreFlusher.memstoreSize);
 assertHeapSpaceDelta(-maxStepValue, oldMemstoreHeapSize, 
memStoreFlusher.memstoreSize);
@@ -242,10 +242,10 @@ public class TestHeapMemoryManager {
 // flushes are due to onheap overhead. This should once again call for 
increase in
 // memstore size but that increase should be to the safe size
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_HIGHER_MARK;
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
-memStoreFlusher.requestFlush(null, false);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
+memStoreFlusher.requestFlush(null, false, FlushLifeCycleTracker.DUMMY);
 // Allow the tuner to run once and do necessary memory up
 waitForTune(memStoreFlusher, memStoreFlusher.memstoreSize);
 assertHeapSpaceDelta(maxStepValue, oldMemstoreHeapSize, 
memStoreFlusher.memstoreSize);
@@ -308,10 +308,10 @@ public class TestHeapMemoryManager {
 final ChoreService choreService = new ChoreService("TEST_SERVER_NAME");
 heapMemoryManager.start(choreService);
 memStoreFlusher.flushType = FlushType.ABOVE_ONHEAP_LOWER_MARK;
-memStoreFlusher.requestFlush(null, false);
-   

[2/2] hbase git commit: HBASE-18905 Allow CPs to request flush on Region and know the completion of the requested flush

2017-10-25 Thread zhangduo
HBASE-18905 Allow CPs to request flush on Region and know the completion of the 
requested flush


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca79a915
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca79a915
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca79a915

Branch: refs/heads/branch-2
Commit: ca79a9156669f4f635b5ffd5dae115f870eef3ca
Parents: c2dbef1
Author: zhangduo 
Authored: Wed Oct 25 11:00:44 2017 +0800
Committer: zhangduo 
Committed: Wed Oct 25 20:45:53 2017 +0800

--
 .../example/ZooKeeperScanPolicyObserver.java|   3 +-
 .../hbase/coprocessor/RegionObserver.java   |  15 +-
 .../hbase/mob/DefaultMobStoreFlusher.java   |   6 +-
 .../hbase/regionserver/DefaultStoreFlusher.java |   5 +-
 .../regionserver/FlushLifeCycleTracker.java |  51 
 .../hbase/regionserver/FlushRequester.java  |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 118 +
 .../hadoop/hbase/regionserver/HStore.java   |  20 +-
 .../hadoop/hbase/regionserver/LogRoller.java|   2 +-
 .../hbase/regionserver/MemStoreFlusher.java |  67 +++---
 .../hbase/regionserver/RSRpcServices.java   |   3 +-
 .../hadoop/hbase/regionserver/Region.java   |   5 +
 .../regionserver/RegionCoprocessorHost.java |  23 +-
 .../hadoop/hbase/regionserver/StoreFlusher.java |   9 +-
 .../hbase/regionserver/StripeStoreFlusher.java  |   5 +-
 .../hbase/security/access/AccessController.java |   6 +-
 .../client/TestMobCloneSnapshotFromClient.java  |   4 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |   5 +-
 .../coprocessor/TestCoprocessorInterface.java   |   9 +-
 .../TestRegionObserverInterface.java|   4 +-
 .../TestRegionObserverScannerOpenHook.java  |   3 +-
 .../regionserver/NoOpScanPolicyObserver.java|   2 +-
 .../regionserver/TestFlushLifeCycleTracker.java | 240 +++
 .../regionserver/TestFlushRegionEntry.java  |  43 ++--
 .../hbase/regionserver/TestHMobStore.java   |   2 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  17 +-
 .../regionserver/TestHRegionReplayEvents.java   |   8 +-
 .../hadoop/hbase/regionserver/TestHStore.java   |  10 +-
 .../regionserver/TestHeapMemoryManager.java |  75 +++---
 .../regionserver/TestSplitWalDataLoss.java  |  11 +-
 .../regionserver/wal/AbstractTestWALReplay.java |  20 +-
 .../security/access/TestAccessController.java   |   4 +-
 .../access/TestWithDisabledAuthorization.java   |   4 +-
 .../hbase/util/TestCoprocessorScanPolicy.java   |   3 +-
 34 files changed, 589 insertions(+), 215 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca79a915/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
--
diff --git 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
index f849c86..d6d66bb 100644
--- 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
+++ 
b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
+import org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
@@ -188,7 +189,7 @@ public class ZooKeeperScanPolicyObserver implements 
RegionCoprocessor, RegionObs
 
   @Override
   public InternalScanner preFlush(ObserverContext c, Store store,
-  InternalScanner scanner) throws IOException {
+  InternalScanner scanner, FlushLifeCycleTracker tracker) throws IOException {
 return wrap(scanner);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/ca79a915/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index 5c89149..2ca1495 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -42,6 +

hbase git commit: HBASE-19065 HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish - revert, waiting for alpha4 to come out

2017-10-25 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 ca79a9156 -> 111380b64


HBASE-19065 HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() 
to finish - revert, waiting for alpha4 to come out


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/111380b6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/111380b6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/111380b6

Branch: refs/heads/branch-2
Commit: 111380b64782703381b2b852c6425abe0bc9d21a
Parents: ca79a91
Author: tedyu 
Authored: Wed Oct 25 06:28:24 2017 -0700
Committer: tedyu 
Committed: Wed Oct 25 06:28:24 2017 -0700

--
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 4 
 1 file changed, 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/111380b6/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index f0c9ec2..c90a702 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5710,10 +5710,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   seqId = ((FlushResultImpl)fs).flushSequenceId;
} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
   seqId = ((FlushResultImpl)fs).flushSequenceId;
-} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH) {
-  // CANNOT_FLUSH may mean that a flush is already on-going
-  // we need to wait for that flush to complete
-  waitForFlushes();
 } else {
   throw new IOException("Could not bulk load with an assigned sequential ID because the "+
 "flush didn't run. Reason for not flushing: " + ((FlushResultImpl)fs).failureReason);



[35/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/class-use/CoordinatedStateManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/CoordinatedStateManager.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CoordinatedStateManager.html
index e3be129..6e42535 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CoordinatedStateManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CoordinatedStateManager.html
@@ -91,23 +91,11 @@
  
 
 
-org.apache.hadoop.hbase.master
- 
-
-
 org.apache.hadoop.hbase.regionserver
  
 
-
-org.apache.hadoop.hbase.replication.regionserver
- 
-
 
-org.apache.hadoop.hbase.util
- 
-
-
-org.apache.hadoop.hbase.wal
+org.apache.hadoop.hbase.replication.regionserver
  
 
 
@@ -132,12 +120,6 @@
 Get CoordinatedStateManager instance for this server.
 
 
-
-static CoordinatedStateManager
-CoordinatedStateManagerFactory.getCoordinatedStateManager(org.apache.hadoop.conf.Configuration conf)
-Creates consensus provider from the given 
configuration.
-
-
 
 
 
@@ -154,103 +136,40 @@
 
 
 class 
-BaseCoordinatedStateManager
-Base class for CoordinatedStateManager 
implementations.
-
-
-
-class 
 ZkCoordinatedStateManager
 ZooKeeper-based implementation of CoordinatedStateManager.
 
 
 
 
-
-Constructors in org.apache.hadoop.hbase.coordination
 with parameters of type CoordinatedStateManager 
-
-Constructor and Description
-
-
-
-ZKSplitLogManagerCoordination(CoordinatedStateManager manager,
- ZooKeeperWatcher watcher) 
-
-
-
 
-
+
 
 
-Uses of CoordinatedStateManager in org.apache.hadoop.hbase.master
-
-Methods in org.apache.hadoop.hbase.master
 with parameters of type CoordinatedStateManager 
+Uses of CoordinatedStateManager in org.apache.hadoop.hbase.regionserver
+
+Fields in org.apache.hadoop.hbase.regionserver
 declared as CoordinatedStateManager 
 
 Modifier and Type
-Method and Description
+Field and Description
 
 
 
-static HMaster
-HMaster.constructMaster(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class masterClass,
-   org.apache.hadoop.conf.Configuration conf,
-   CoordinatedStateManager cp)
-Utility for constructing an instance of the passed HMaster 
class.
-
+protected CoordinatedStateManager
+HRegionServer.csm 
 
 
 
-
-Constructors in org.apache.hadoop.hbase.master
 with parameters of type CoordinatedStateManager 
-
-Constructor and Description
-
-
-
-HMaster(org.apache.hadoop.conf.Configuration conf,
-   CoordinatedStateManager csm)
-Initializes the HMaster.
-
-
-
-LocalHMaster(org.apache.hadoop.conf.Configuration conf,
-CoordinatedStateManager csm) 
-
-
-
-
-
-
-
-Uses of CoordinatedStateManager in org.apache.hadoop.hbase.regionserver
 
-Methods in org.apache.hadoop.hbase.regionserver
 with parameters of type CoordinatedStateManager 
+Methods in org.apache.hadoop.hbase.regionserver
 that return CoordinatedStateManager 
 
 Modifier and Type
 Method and Description
 
 
 
-static HRegionServer
-HRegionServer.constructRegionServer(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class regionServerClass,
- org.apache.hadoop.conf.Configuration conf2,
- CoordinatedStateManager cp)
-Utility for constructing an instance of the passed 
HRegionServer class.
-
-
-
-
-
-Constructors in org.apache.hadoop.hbase.regionserver
 with parameters of type CoordinatedStateManager 
-
-Constructor and Description
-
-
-
-HRegionServer(org.apache.hadoop.conf.Configuration conf,
- CoordinatedStateManager csm)
-Starts a HRegionServer at the default location
-
+CoordinatedStateManager
+HRegionServer.getCoordinatedStateManager() 
 
 
 
@@ -273,83 +192,6 @@
 
 
 
-
-
-
-Uses of CoordinatedStateManager in org.apache.hadoop.hbase.util
-
-Methods in org.apache.hadoop.hbase.util
 with parameters of type CoordinatedStateManager 
-
-Modifier and Type
-Method and Description
-
-
-
-static JVMClusterUtil.MasterThread
-JVMClusterUtil.createMasterThread(org.apache.hadoop.conf.Configuration c,
-  CoordinatedStateManager cp,
-  http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class hmc,
-  int index)
-Creates a JVMClusterUtil.MasterThread.
-
-
-
-static JVMClusterUtil.RegionServerThread
-JVMClusterUtil.createRegionServerThread(org.apache.hadoop.conf.Configuration c,
-CoordinatedStateManager cp,
-http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class hrsc,
-i

[43/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 8787446..ddc9ff4 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -286,10 +286,10 @@
  Warnings
  Errors
 
-2057
+2060
 0
 0
-13507
+13620
 
 Files
 
@@ -404,7 +404,7 @@
 0
 1
 
-org/apache/hadoop/hbase/CoordinatedStateManagerFactory.java
+org/apache/hadoop/hbase/CoordinatedStateManager.java
 0
 0
 1
@@ -1824,370 +1824,380 @@
 0
 61
 
-org/apache/hadoop/hbase/coordination/BaseCoordinatedStateManager.java
-0
-0
-1
-
 org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java
 0
 0
-11
-
+12
+
 org/apache/hadoop/hbase/coordination/ZkCoordinatedStateManager.java
 0
 0
-1
-
+3
+
 org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/coprocessor/CoprocessorException.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/coprocessor/CoprocessorService.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/CoprocessorServiceBackwardCompatiblity.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/coprocessor/CoreCoprocessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/EndpointObserver.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/Export.java
 0
 0
 18
-
+
 org/apache/hadoop/hbase/coprocessor/HasMasterServices.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/HasRegionServerServices.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/MasterCoprocessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/MasterObserver.java
 0
 0
 22
-
+
 org/apache/hadoop/hbase/coprocessor/MetricsCoprocessor.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/ObserverContext.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/ObserverContextImpl.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/coprocessor/RegionCoprocessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 0
 0
-22
-
+24
+
 org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/WALCoprocessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/WALObserver.java
 0
 0
-1
-
+3
+
 org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/package-info.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/errorhandling/ForeignException.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/errorprone/AlwaysPasses.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/exceptions/MergeRegionException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/exceptions/UnexpectedStateException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/executor/EventHandler.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/executor/EventType.java
 0
 0
 40
-
+
 org/apache/hadoop/hbase/executor/ExecutorService.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/executor/ExecutorType.java
 0
 0
 19
-
+
 org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
 0
 0
 28
-
+
 org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/favored/FavoredNodesManager.java
 0
 0
 1
-
+
 org/apache/h

[09/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index efd5f9e..056f88e 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -414,15 +414,15 @@ implements 
 void
-add(Cell cell,
-   MemStoreSize memstoreSize)
+add(Cell cell,
+   MemStoreSizing memstoreSizing)
 Adds a value to the memstore
 
 
 
 void
-add(http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
-   MemStoreSize memstoreSize)
+add(http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
+   MemStoreSizing memstoreSizing)
 Adds the specified value to the memstore
 
 
@@ -529,7 +529,8 @@ implements 
 StoreFlushContext
-createFlushContext(long cacheFlushId) 
+createFlushContext(long cacheFlushId,
+  FlushLifeCycleTracker tracker) 
 
 
 protected KeyValueScanner
@@ -582,10 +583,11 @@ implements 
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-flushCache(long logCacheFlushId,
+flushCache(long logCacheFlushId,
   MemStoreSnapshot snapshot,
   MonitoredTask status,
-  ThroughputController throughputController)
+  ThroughputController throughputController,
+  FlushLifeCycleTracker tracker)
 Write out current snapshot.
 
 
@@ -1164,9 +1166,9 @@ implements 
 void
-upsert(http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
+upsert(http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
   long readpoint,
-  MemStoreSize memstoreSize)
+  MemStoreSizing memstoreSizing)
 Adds or replaces the specified KeyValues.
 
 
@@ -1678,7 +1680,7 @@ implements 
 
 FIXED_OVERHEAD
-public static final long FIXED_OVERHEAD
+public static final long FIXED_OVERHEAD
 
 
 
@@ -1687,7 +1689,7 @@ implements 
 
 DEEP_OVERHEAD
-public static final long DEEP_OVERHEAD
+public static final long DEEP_OVERHEAD
 
 
 
@@ -2214,36 +2216,26 @@ public static org.apache.hadoop.fs.Path 
+
 
 
 
 
 add
-public void add(Cell cell,
-MemStoreSize memstoreSize)
+public void add(Cell cell,
+MemStoreSizing memstoreSizing)
 Adds a value to the memstore
-
-Parameters:
-cell - 
-memstoreSize - 
-
 
 
-
+
 
 
 
 
 add
-public void add(http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
-MemStoreSize memstoreSize)
+public void add(http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
+MemStoreSizing memstoreSizing)
 Adds the specified value to the memstore
-
-Parameters:
-cells - 
-memstoreSize - 
-
 
 
 
@@ -2252,7 +2244,7 @@ public static org.apache.hadoop.fs.Path 
 
 timeOfOldestEdit
-public long timeOfOldestEdit()
+public long timeOfOldestEdit()
 Description copied from 
interface: Store
 When was the last edit done in the memstore
 
@@ -2267,7 +2259,7 @@ public static org.apache.hadoop.fs.Path 
 
 getStorefiles
-public http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection getStorefiles()
+public http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection getStorefiles()
 
 Specified by:
 getStorefiles in
 interface Store
@@ -2282,7 +2274,7 @@ public static org.apache.hadoop.fs.Path 
 
 getCompactedFiles
-public http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection getCompactedFiles()
+public http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection getCompactedFiles()
 
 Specified by:
 getCompactedFiles in
 interface Store
@@ -2295,7 +2287,7 @@ public static org.apache.hadoop.fs.Path 
 
 assertBulkLoadHFileOk
-public void assertBulkLoadHFileOk(org.apache.hadoop.fs.Path srcPath)
+public void assertBulkLoadHFileOk(org.apache.hadoop.fs.Path srcPath)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class

hbase-site git commit: INFRA-10751 Empty commit

2017-10-25 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 21726f5a2 -> 7bee80a09


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/7bee80a0
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/7bee80a0
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/7bee80a0

Branch: refs/heads/asf-site
Commit: 7bee80a09c4db5388959ede737f53bfc9fb02bc8
Parents: 21726f5
Author: jenkins 
Authored: Wed Oct 25 15:16:41 2017 +
Committer: jenkins 
Committed: Wed Oct 25 15:16:41 2017 +

--

--




[27/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/FilterListBase.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FilterListBase.html 
b/devapidocs/org/apache/hadoop/hbase/filter/FilterListBase.html
new file mode 100644
index 000..dfa465f
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FilterListBase.html
@@ -0,0 +1,763 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+FilterListBase (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = 
{"i0":6,"i1":9,"i2":10,"i3":10,"i4":10,"i5":6,"i6":10,"i7":10,"i8":6,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10,"i15":10};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],4:["t3","Abstract 
Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.filter
+Class FilterListBase
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.filter.Filter
+
+
+org.apache.hadoop.hbase.filter.FilterBase
+
+
+org.apache.hadoop.hbase.filter.FilterListBase
+
+
+
+
+
+
+
+
+
+
+
+Direct Known Subclasses:
+FilterListWithAND, FilterListWithOR
+
+
+
+@InterfaceAudience.Private
+public abstract class FilterListBase
+extends FilterBase
+Base class for FilterList. Currently, we have two 
sub-classes to extend this class:
+ FilterListWithOR, FilterListWithAND.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from 
class org.apache.hadoop.hbase.filter.Filter
+Filter.ReturnCode
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields 
+
+Modifier and Type
+Field and Description
+
+
+protected http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayList
+filters 
+
+
+private static int
+MAX_LOG_FILTERS 
+
+
+protected Cell
+referenceCell
+Reference Cell used by transformCell(Cell)
 for validation purpose.
+
+
+
+protected Cell
+transformedCell
+When filtering a given Cell in filterKeyValue(Cell),
 this stores the transformed Cell
+ to be returned by transformCell(Cell).
+
+
+
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.filter.Filter
+reversed
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+FilterListBase(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List filters) 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods  Static Methods  Instance Methods  Abstract Methods  Concrete Methods
+
+Modifier and Type           Method and Description
+
+abstract void               addFilterLists(List filters)
+
+protected static boolean    checkAndGetReversed(List rowFilters, boolean defaultValue)
+
+protected int               compareCell(Cell a, Cell b)
+
+Filter.ReturnCode           filterKeyValue(Cell c)
+                            A way to filter based on the column family, column qualifier and/or the column value.
+
+void                        filterRowCells(List cells)
+                            Filters that never filter by modifying the returned List of Cells can inherit this
+                            implementation that does nothing.
+
+protected abstract String   formatLogFilters(List logFilters)
+
+http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or inte
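
A quick orientation for the two FilterListBase subclasses documented on this page: both are @InterfaceAudience.Private, so client code never instantiates them directly and instead picks an operator on FilterList. The sketch below is hedged — the row prefix and filter choices are illustrative only, not taken from this commit.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListSketch {
  public static Scan buildScan() {
    // MUST_PASS_ALL is evaluated by FilterListWithAND: a cell is kept only if
    // every sub-filter accepts it.
    FilterList andList = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new PrefixFilter(Bytes.toBytes("row-2017")),  // illustrative row prefix
        new KeyOnlyFilter());                         // return keys without values

    // MUST_PASS_ONE would instead be evaluated by FilterListWithOR: a cell is
    // kept as soon as any sub-filter accepts it.
    Scan scan = new Scan();
    scan.setFilter(andList);
    return scan;
  }
}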

[34/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html 
b/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
index 8afe9f5..63dd80d 100644
--- a/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
+++ b/devapidocs/org/apache/hadoop/hbase/constraint/ConstraintProcessor.html
@@ -271,7 +271,7 @@ implements RegionObserver
-postAppend, postBatchMutate, postBatchMutateIndispensably, postBulkLoadHFile, postCheckAndDelete,
- postCheckAndPut, postClose, postCloseRegionOperation, postCommitStoreFile, postCompact,
- postCompactSelection, postDelete, postExists, postFlush, postFlush, postGetOp, postIncrement,
- postInstantiateDeleteTracker, postLogReplay, postMutationBeforeWAL, postOpen, postPut,
- postReplayWALs, postScannerClose, postScannerNext, postScannerOpen, postStartRegionOperation,
- postStoreFileReaderOpen, postWALRestore, preAppend, preAppendAfterRowLock, preBatchMutate,
- preBulkLoadHFile, preCheckAndDelete, preCheckAndDeleteAfterRowLock, preCheckAndPut,
- preCheckAndPutAfterRowLock, preClose, preCommitStoreFile, preCompact, preCompactSelection,
- preDelete, preExists, preFlush, preFlush, preGetOp, preIncrement, preIncrementAfterRowLock,
- preOpen, prePrepareTimeStampForDeleteVersion, preReplayWALs, preScannerClose, preScannerNext,
- preScannerOpen, preStoreFileReaderOpen, preWALRestore
+postAppend, postBatchMutate, postBatchMutateIndispensably, postBulkLoadHFile, postCheckAndDelete,
+ postCheckAndPut, postClose, postCloseRegionOperation, postCommitStoreFile, postCompact,
+ postCompactSelection, postDelete, postExists, postFlush, postFlush, postGetOp, postIncrement,
+ postInstantiateDeleteTracker, postLogReplay, postMutationBeforeWAL, postOpen, postPut,
+ postReplayWALs, postScannerClose, postScannerNext, postScannerOpen, postStartRegionOperation,
+ postStoreFileReaderOpen, postWALRestore, preAppend, preAppendAfterRowLock, preBatchMutate,
+ preBulkLoadHFile, preCheckAndDelete, preCheckAndDeleteAfterRowLock, preCheckAndPut,
+ preCheckAndPutAfterRowLock, preClose, preCommitStoreFile, preCompact, preCompactSelection,
+ preDelete, preExists, preFlush, preFlush, preGetOp, preIncrement, preIncrementAfterRowLock,
+ preOpen, prePrepareTimeStampForDeleteVersion, preReplayWALs, preScannerClose, preScannerNext,
+ preScannerOpen, preStoreFileReaderOpen, preWALRestore
+ (same method names; link targets regenerated, with the preFlush/postFlush anchors now referencing
+ the signatures that take a FlushLifeCycleTracker)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/coordination/BaseCoordinatedStateManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coordination/BaseCoordinatedStateManager.html
 
b/devapidocs/org/apache/hadoop/hbase/coordination/BaseCoordinatedStateManager.html
deleted file mode 100644
index 2f663de..000
--- 
a/devapidocs/org/apache/hadoop/hbase/coordination/BaseCoordinatedStateManager.html
+++ /dev/null
@@ -1,433 +0,0 @@
-BaseCoordinatedStateManager (Apache HBase 3.0.0-SNAPSHOT API)
-(deleted generated Javadoc page: DOCTYPE, method-index scripts and navigation links removed along with the class)
-N
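
ConstraintProcessor, whose inherited-method links are regenerated earlier in this message, is itself just a RegionObserver implementation. As a hedged sketch of that hook style — the class name and the empty-Put check are invented for illustration, the prePut parameter order is assumed to mirror the preDelete signature visible in the regenerated links, and coprocessor registration on the table is omitted — a minimal observer looks like this:

import java.io.IOException;

import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.wal.WALEdit;

// Hypothetical observer, not part of HBase: vetoes empty Puts before they are applied.
public class RejectEmptyPutObserver implements RegionObserver {
  @Override
  public void prePut(ObserverContext<RegionCoprocessorEnvironment> ctx, Put put,
      WALEdit edit, Durability durability) throws IOException {
    if (put.isEmpty()) {
      throw new IOException("empty Put rejected by constraint");
    }
  }
}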

[30/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.html
index cde91a1..382d7b6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value="Coprocesssor")
  @InterfaceStability.Evolving
-public interface RegionServerCoprocessorEnvironment
+public interface RegionServerCoprocessorEnvironment
 extends CoprocessorEnvironment
 
 
@@ -144,6 +144,10 @@ extends 
+OnlineRegions
+getOnlineRegions() 
+
+
 ServerName
 getServerName() 
 
@@ -175,20 +179,33 @@ extends 
 
 getServerName
-ServerName getServerName()
+ServerName getServerName()
 
 Returns:
 Hosting Server's ServerName
 
 
 
+
+
+
+
+
+getOnlineRegions
+OnlineRegions getOnlineRegions()
+
+Returns:
+Interface to the Map of regions online on this RegionServer (see getServerName()).
+
+
+
 
 
 
 
 
 getConnection
-Connection getConnection()
+Connection getConnection()
 Be careful RPC'ing from a Coprocessor context.
  RPC's will fail, stall, retry, and/or crawl because the remote side is not 
online, is
  struggling or it is on the other side of a network partition. Any use of 
Connection from
@@ -210,7 +227,7 @@ extends 
 
 getMetricRegistryForRegionServer
-MetricRegistry getMetricRegistryForRegionServer()
+MetricRegistry getMetricRegistryForRegionServer()
 Returns a MetricRegistry that can be used to track metrics 
at the region server level.
 
  See ExampleMasterObserverWithMetrics class in the hbase-examples modules 
for examples

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/coprocessor/WALObserver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/WALObserver.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/WALObserver.html
index 75b441b..1ebcf70 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/WALObserver.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/WALObserver.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":18,"i1":18,"i2":18,"i3":18};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],16:["t5","Default Methods"]};
+var methods = {"i0":18,"i1":50,"i2":18,"i3":50};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],16:["t5","Default Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -149,7 +149,7 @@ public interface 
-All Methods Instance Methods Default Methods 
+All Methods Instance Methods Default Methods Deprecated Methods 
 
 Modifier and Type
 Method and Description
@@ -168,8 +168,10 @@ public interface RegionInfo info,
 WALKey logKey,
 WALEdit logEdit)
-Called after a WALEdit
- is writen to WAL.
+Deprecated. 
+Since hbase-2.0.0. To be 
replaced with an alternative that does not expose
+ InterfaceAudience classes such as WALKey and WALEdit. Will be removed in 
hbase-3.0.0.
+
 
 
 
@@ -186,8 +188,10 @@ public interface RegionInfo info,
WALKey logKey,
WALEdit logEdit)
-Called before a WALEdit
- is writen to WAL.
+Deprecated. 
+Since hbase-2.0.0. To be 
replaced with an alternative that does not expose
+ InterfaceAudience classes such as WALKey and WALEdit. Will be removed in 
hbase-3.0.0.
+
 
 
 
@@ -211,11 +215,14 @@ public interface 
 
 preWALWrite
-default boolean preWALWrite(ObserverContext ctx,
-RegionInfo info,
-WALKey logKey,
-WALEdit logEdit)
- throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
+http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+default boolean preWALWrite(ObserverContext ctx,
+RegionInfo info,
+WALKey logKey,
+WALEdit l
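
The first hunk of this message adds getOnlineRegions() to RegionServerCoprocessorEnvironment. Below is a hedged sketch of a helper that sticks to the accessors listed in that interface summary; the helper class is hypothetical, and the import path for OnlineRegions (org.apache.hadoop.hbase.regionserver) is assumed rather than shown in the excerpt.

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.OnlineRegions;

final class RegionServerEnvProbe {
  // Summarises the environment handed to a region server coprocessor.
  static String describe(RegionServerCoprocessorEnvironment env) {
    ServerName name = env.getServerName();          // hosting server
    OnlineRegions online = env.getOnlineRegions();  // regions currently online on this server
    // Per the getConnection() javadoc above, be careful about RPC'ing back into
    // the cluster from coprocessor hooks; the Connection is deliberately unused here.
    return "coprocessor environment on " + name + ", online-regions handle: " + online;
  }

  private RegionServerEnvProbe() {
  }
}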

[48/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html 
b/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
index 766b46c..50b0db1 100644
--- a/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
+++ b/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
@@ -513,7 +513,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addRegionServer
-public org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread addRegionServer(org.apache.hadoop.conf.Configuration config,
+public org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread addRegionServer(org.apache.hadoop.conf.Configuration config,

   int index,

   User user)

throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException,
@@ -531,7 +531,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addMaster
-public org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread addMaster()
+public org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread addMaster()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Throws:
@@ -545,7 +545,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addMaster
-public org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread addMaster(org.apache.hadoop.conf.Configuration c,
+public org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread addMaster(org.apache.hadoop.conf.Configuration c,
   
int index)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
@@ -560,7 +560,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addMaster
-public org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread addMaster(org.apache.hadoop.conf.Configuration c,
+public org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread addMaster(org.apache.hadoop.conf.Configuration c,
   
int index,
   User user)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException,
@@ -578,7 +578,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionServer
-public org.apache.hadoop.hbase.regionserver.HRegionServer getRegionServer(int serverNumber)
+public org.apache.hadoop.hbase.regionserver.HRegionServer getRegionServer(int serverNumber)
 
 Parameters:
 serverNumber - 
@@ -593,7 +593,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionServers
-public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List getRegionServers()
+public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List getRegionServers()
 
 Returns:
 Read-only list of region server threads.
@@ -606,7 +606,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getLiveRegionServers
-public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List getLiveRegionServers()
+public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List getLiveRegionServers()
 
 Returns:
 List of running servers (Some servers may have been killed or
@@ -621,7 +621,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getConfiguration
-public org.apache.hadoop.conf.Configuration getConfiguration()
+public org.apache.hadoop.conf.Configuration getConfiguration()
 
 Returns:
 the Configuration used by this LocalHBaseCluster
@@ -634,7 +63
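
The LocalHBaseCluster hunks above only re-render existing signatures, but they outline the public surface of the class. A hedged sketch of starting an in-process cluster follows; startup(), shutdown() and the (Configuration, int, int) constructor are long-standing members that do not appear in the excerpt, and site configuration / ZooKeeper setup is outside the sketch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.LocalHBaseCluster;

public class LocalClusterSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // One master and one region server, all in the current JVM.
    LocalHBaseCluster cluster = new LocalHBaseCluster(conf, 1, 1);
    cluster.startup();
    try {
      System.out.println("region server threads: " + cluster.getRegionServers().size());
    } finally {
      cluster.shutdown();  // join() would instead wait for the threads to finish
    }
  }
}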

[46/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/apidocs/src-html/org/apache/hadoop/hbase/filter/Filter.ReturnCode.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/Filter.ReturnCode.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/Filter.ReturnCode.html
index 2356cfb..35d409c 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/Filter.ReturnCode.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/Filter.ReturnCode.html
@@ -180,128 +180,132 @@
 172     */
 173    NEXT_COL,
 174    /**
-175     * Done with columns, skip to next row. Note that filterRow() will
-176     * still be called.
-177     */
-178    NEXT_ROW,
-179    /**
-180     * Seek to next key which is given as hint by the filter.
+175     * Seek to next row in current family. It may still pass a cell whose family is different but
+176     * row is the same as previous cell to {@link #filterKeyValue(Cell)} , even if we get a NEXT_ROW
+177     * returned for previous cell. For more details see HBASE-18368. 
+178 * Once reset() method was invoked, then we switch to the next row for all family, and you can +179 * catch the event by invoking CellUtils.matchingRows(previousCell, currentCell).
+180 * Note that filterRow() will still be called.
181 */ -182SEEK_NEXT_USING_HINT, +182NEXT_ROW, 183/** -184 * Include KeyValue and done with row, seek to next. +184 * Seek to next key which is given as hint by the filter. 185 */ -186INCLUDE_AND_SEEK_NEXT_ROW, -187} -188 -189 /** -190 * Chance to alter the list of Cells to be submitted. Modifications to the list will carry on -191 * -192 * Concrete implementers can signal a failure condition in their code by throwing an -193 * {@link IOException}. -194 * -195 * @param kvs the list of Cells to be filtered -196 * @throws IOException in case an I/O or an filter specific failure needs to be signaled. -197 */ -198 abstract public void filterRowCells(List kvs) throws IOException; -199 -200 /** -201 * Primarily used to check for conflicts with scans(such as scans that do not read a full row at a -202 * time). -203 * -204 * @return True if this filter actively uses filterRowCells(List) or filterRow(). -205 */ -206 abstract public boolean hasFilterRow(); -207 -208 /** -209 * Last chance to veto row based on previous {@link #filterKeyValue(Cell)} calls. The filter -210 * needs to retain state then return a particular value for this call if they wish to exclude a -211 * row if a certain column is missing (for example). -212 * -213 * Concrete implementers can signal a failure condition in their code by throwing an -214 * {@link IOException}. -215 * -216 * @return true to exclude row, false to include row. -217 * @throws IOException in case an I/O or an filter specific failure needs to be signaled. -218 */ -219 abstract public boolean filterRow() throws IOException; -220 -221 /** -222 * If the filter returns the match code SEEK_NEXT_USING_HINT, then it should also tell which is -223 * the next key it must seek to. After receiving the match code SEEK_NEXT_USING_HINT, the -224 * QueryMatcher would call this function to find out which key it must next seek to. -225 * -226 * Concrete implementers can signal a failure condition in their code by throwing an -227 * {@link IOException}. -228 * -229 * @return KeyValue which must be next seeked. return null if the filter is not sure which key to -230 * seek to next. -231 * @throws IOException in case an I/O or an filter specific failure needs to be signaled. -232 */ -233 abstract public Cell getNextCellHint(final Cell currentCell) throws IOException; -234 -235 /** -236 * Check that given column family is essential for filter to check row. Most filters always return -237 * true here. But some could have more sophisticated logic which could significantly reduce -238 * scanning process by not even touching columns until we are 100% sure that it's data is needed -239 * in result. -240 * -241 * Concrete implementers can signal a failure condition in their code by throwing an -242 * {@link IOException}. -243 * -244 * @throws IOException in case an I/O or an filter specific failure needs to be signaled. -245 */ -246 abstract public boolean isFamilyEssential(byte[] name) throws IOException; -247 -248 /** -249 * TODO: JAVADOC -250 * -251 * Concrete implementers can signal a failure condition in their code by throwing an -252 * {@link IOException}. -253 * -254 * @return The filter serialized using pb -255 * @throws IOException in case an I/O or an filter specific failure needs to be signaled. -256 */ -257 abstract public byte[] toByteArray() throws IOException; -258 -259 /** -260 * -261 * Concrete implementers can signal a f

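
The Filter.ReturnCode excerpt above documents the revised NEXT_ROW contract (skips are per family, see HBASE-18368) and the reset() row-switch event. A hedged sketch of a custom filter that leans on that contract follows; the filter name and its keep-one-cell-per-row behaviour are illustrative, and a deployable filter would additionally need protobuf serialization, omitted here.

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;

// Illustrative filter, not part of HBase: emits at most one cell per row.
public class FirstCellPerRowFilter extends FilterBase {
  private Cell previousCell;

  @Override
  public ReturnCode filterKeyValue(Cell c) throws IOException {
    // As the javadoc above warns, after a NEXT_ROW the scanner may still hand us
    // cells of the same row from another family, so compare rows explicitly.
    if (previousCell != null && CellUtil.matchingRows(previousCell, c)) {
      return ReturnCode.NEXT_ROW;
    }
    previousCell = c;
    return ReturnCode.INCLUDE;
  }

  @Override
  public void reset() throws IOException {
    previousCell = null;  // the scanner switched to the next row for all families
  }
}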
[42/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 3141540..d0f9173 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2017 The Apache Software Foundation
 
-  File: 2057,
- Errors: 13507,
+  File: 2060,
+ Errors: 13620,
  Warnings: 0,
  Infos: 0
   
@@ -391,7 +391,7 @@ under the License.
   0
 
 
-  3
+  2
 
   
   
@@ -1007,7 +1007,7 @@ under the License.
   0
 
 
-  60
+  54
 
   
   
@@ -1656,20 +1656,6 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.ImmutableOnlineRegions.java";>org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.java
-
-
-  0
-
-
-  0
-
-
-  6
-
-  
-  
-
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.types.RawBytesFixedLength.java";>org/apache/hadoop/hbase/types/RawBytesFixedLength.java
 
 
@@ -2636,6 +2622,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.MemStoreSizing.java";>org/apache/hadoop/hbase/regionserver/MemStoreSizing.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure.Subprocedure.java";>org/apache/hadoop/hbase/procedure/Subprocedure.java
 
 
@@ -3149,7 +3149,7 @@ under the License.
   0
 
 
-  5
+  3
 
   
   
@@ -4451,7 +4451,7 @@ under the License.
   0
 
 
-  6
+  5
 
   
   
@@ -7643,7 +7643,7 @@ under the License.
   0
 
 
-  1
+  3
 
   
   
@@ -7811,7 +7811,7 @@ under the License.
   0
 
 
-  22
+  21
 
   
   
@@ -12865,7 +12865,7 @@ under the License.
   0
 
 
-  41
+  7
 
   
   
@@ -15628,6 +15628,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker.java";>org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.coprocessor.AggregateImplementation.java";>org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
 
 
@@ -16379,7 +16393,7 @@ under the License.
   0
 
 
-  3
+  4
 
   
   
@@ -16981,7 +16995,7 @@ under the License.
   0
 
 
-  11
+  12
 
   
   
@@ -17532,20 +17546,6 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager.java";>org/apache/hadoop/hbase/coordination/BaseCoordinatedStateManager.java
-
-
-  0
-
-  

[51/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/21726f5a
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/21726f5a
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/21726f5a

Branch: refs/heads/asf-site
Commit: 21726f5a2f4351b307a6a686410a5dabb23ca374
Parents: d7a614f
Author: jenkins 
Authored: Wed Oct 25 15:15:56 2017 +
Committer: jenkins 
Committed: Wed Oct 25 15:15:56 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apidocs/constant-values.html|   309 +-
 apidocs/index-all.html  |26 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   442 +-
 .../apache/hadoop/hbase/LocalHBaseCluster.html  |42 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html | 4 +-
 .../hadoop/hbase/filter/Filter.ReturnCode.html  |18 +-
 .../org/apache/hadoop/hbase/filter/Filter.html  |18 +-
 .../hbase/filter/FilterList.Operator.html   |10 +-
 .../apache/hadoop/hbase/filter/FilterList.html  |   171 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |14 +-
 .../filter/class-use/FilterList.Operator.html   | 4 +-
 .../hadoop/hbase/filter/package-summary.html| 6 +-
 .../apache/hadoop/hbase/filter/package-use.html | 6 +-
 .../org/apache/hadoop/hbase/HConstants.html |   238 +-
 .../apache/hadoop/hbase/LocalHBaseCluster.html  |   520 +-
 .../hadoop/hbase/filter/Filter.ReturnCode.html  |   240 +-
 .../org/apache/hadoop/hbase/filter/Filter.html  |   240 +-
 .../hbase/filter/FilterList.Operator.html   |   785 +-
 .../apache/hadoop/hbase/filter/FilterList.html  |   785 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 22746 +
 checkstyle.rss  |   150 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html |27 +-
 dependency-info.html| 4 +-
 dependency-management.html  |16 +-
 devapidocs/allclasses-frame.html| 9 +-
 devapidocs/allclasses-noframe.html  | 9 +-
 devapidocs/constant-values.html |   321 +-
 devapidocs/deprecated-list.html |24 +
 devapidocs/index-all.html   |   501 +-
 .../hadoop/hbase/CoordinatedStateManager.html   |84 +-
 .../hbase/CoordinatedStateManagerFactory.html   |   289 -
 .../org/apache/hadoop/hbase/Coprocessor.html| 4 +-
 .../org/apache/hadoop/hbase/HConstants.html |   444 +-
 .../HealthChecker.HealthCheckerExitStatus.html  | 4 +-
 .../apache/hadoop/hbase/LocalHBaseCluster.html  |42 +-
 .../hadoop/hbase/backup/BackupObserver.html | 2 +-
 .../master/LogRollMasterProcedureManager.html   | 8 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../LogRollRegionServerProcedureManager.html| 2 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   295 +-
 .../class-use/CoordinatedStateManager.html  |   180 +-
 .../CoordinatedStateManagerFactory.html |   125 -
 .../apache/hadoop/hbase/class-use/Server.html   |93 +-
 .../hadoop/hbase/class-use/ServerName.html  |29 +-
 .../hadoop/hbase/class-use/TableName.html   | 2 +-
 .../hbase/client/class-use/Connection.html  |64 +
 .../hbase/client/class-use/RegionInfo.html  |24 +-
 .../hadoop/hbase/client/package-tree.html   |26 +-
 .../apache/hadoop/hbase/client/package-use.html |12 +-
 .../hbase/constraint/ConstraintProcessor.html   | 2 +-
 .../BaseCoordinatedStateManager.html|   433 -
 .../SplitLogManagerCoordination.html| 7 +-
 .../ZKSplitLogManagerCoordination.html  |38 +-
 .../coordination/ZkCoordinatedStateManager.html |   120 +-
 ...WorkerCoordination.GetDataAsyncCallback.html | 8 +-
 ...ogWorkerCoordination.ZkSplitTaskDetails.html |20 +-
 .../ZkSplitLogWorkerCoordination.html   |66 +-
 .../class-use/BaseCoordinatedStateManager.html  |   224 -
 .../class-use/SplitLogManagerCoordination.html  |32 +-
 .../class-use/SplitLogWorkerCoordination.html   |98 +-
 .../class-use/ZkCoordinatedStateManager.html|54 +-
 .../hbase/coordination/package-frame.html   | 1 -
 .../hbase/coordination/package-summary.html |14 +-
 .../hadoop/hbase/coordination/package-tree.html | 6 +-
 .../hadoop/hbase/coordination/package-use.html  |75 +-
 .../RegionCoprocessorEnvironment.html   |39 +-
 .../RegionObserver.Muta

[26/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithAND.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithAND.html 
b/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithAND.html
new file mode 100644
index 000..129106d
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithAND.html
@@ -0,0 +1,697 @@
+FilterListWithAND (Apache HBase 3.0.0-SNAPSHOT API)
+(generated Javadoc page header: DOCTYPE, stylesheet links and method-index scripts omitted)
+
+
+
+
+
+org.apache.hadoop.hbase.filter
+Class FilterListWithAND
+
+
+
+java.lang.Object
+  org.apache.hadoop.hbase.filter.Filter
+    org.apache.hadoop.hbase.filter.FilterBase
+      org.apache.hadoop.hbase.filter.FilterListBase
+        org.apache.hadoop.hbase.filter.FilterListWithAND
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+@InterfaceAudience.Private
+public class FilterListWithAND
+extends FilterListBase
+FilterListWithAND represents an ordered list of filters which will be evaluated with an AND operator.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from 
class org.apache.hadoop.hbase.filter.Filter
+Filter.ReturnCode
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields 
+
+Modifier and Type
+Field and Description
+
+
+private http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">Set
+seekHintFilter 
+
+
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.filter.FilterListBase
+filters,
 referenceCell,
 transformedCell
+
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.filter.Filter
+reversed
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+FilterListWithAND(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List filters) 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Instance Methods Concrete Methods 
+
+Modifier and Type
+Method and Description
+
+
+void
+addFilterLists(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List filters) 
+
+
+boolean
+filterAllRemaining()
+Filters that never filter all remaining can inherit this 
implementation that
+ never stops the filter early.
+
+
+
+boolean
+filterRow()
+Filters that never filter by rows based on previously 
gathered state from
+ Filter.filterKeyValue(Cell)
 can inherit this implementation that
+ never filters a row.
+
+
+
+boolean
+filterRowKey(byte[] rowKey,
+int offset,
+int length)
+Filters that do not filter by row key can inherit this 
implementation that
+ never filters anything.
+
+
+
+boolean
+filterRowKey(Cell firstRowCell)
+Filters a row based on the row key.
+
+
+
+protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+formatLogFilters(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List logFilters) 
+
+
+Cell
+getNextCellHint(Cell currentCell)
+Filters that are not sure which key must be next seeked to, 
can inherit
+ this implementation that, by default, returns a null Cell.
+
+
+
+(package private) Filter.ReturnCode
+internalFilterKeyValue(Cell c,
+  Cell transformedCell)
+Internal implementation of FilterListBase.filterKeyValue(Cell)
+
+
+
+private Filter.ReturnCode
+mergeReturnCode(Filter.ReturnCode rc,
+   Filter.ReturnCode localRC)
+FilterList with MUST_PASS_ALL choose the maximal forward 
step among sub-filters in fil

[13/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
index c7714c8..5a1a83b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":9,"i47":9,"i48":10,"i49":9,"i50":9,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":9,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":9,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":1
 
0,"i110":10,"i111":9,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":41,"i119":41,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":9,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":42,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":10,"i174":9,"i175":10,"i176":10,"i177":10,"i178":10,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":9,"i186":10,"i187":10,"i188":9,"i189":9,"i190":9,"i191":9,"i192":9,"i193":9,"i194":9,"i195":9,"i196":9,"i197":10,"i198":10,"i199":10,"i200":10,"i201":10,"i202":10,"i203":10,"i204":10,"i205":10,"i206":9,"i207":10,"i208":10,"i209":10,"i210":10
 
,"i211":10,"i212":10,"i213":10,"i214":10,"i215":10,"i216":10,"i217":10,"i218":10,"i219":10,"i220":10,"i221":10,"i222":10,"i223":10,"i224":10,"i225":10,"i226":10,"i227":10,"i228":10,"i229":10,"i230":10,"i231":10,"i232":10,"i233":10,"i234":10,"i235":10,"i236":9,"i237":9,"i238":10,"i239":10,"i240":10,"i241":10,"i242":10,"i243":10,"i244":10,"i245":10,"i246":10,"i247":10,"i248":10,"i249":10,"i250":9,"i251":10,"i252":10,"i253":10,"i254":10,"i255":10,"i256":10,"i257":10,"i258":10,"i259":10,"i260":10,"i261":10,"i262":10,"i263":10,"i264":10,"i265":9,"i266":10,"i267":10,"i268":10,"i269":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":9,"i47":9,"i48":10,"i49":9,"i50":9,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":9,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":9,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":1
 
0,"i110":10,"i111":9,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":41,"i119":41,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":9,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":42,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":10,"i174":9,"i175":10,"i176":10,"i177":10,"i178":10,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":9,"i186":10,"i187":10,"i188":9,"i189":9,"i190":9,"i191":9,"i192":9,"i193":9,"i194":9,"i195":9,"i196":9

[04/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
index 3d954b1..c760621 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class RegionCoprocessorHost.TableCoprocessorAttribute
+static class RegionCoprocessorHost.TableCoprocessorAttribute
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 
 
@@ -228,7 +228,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 path
-private org.apache.hadoop.fs.Path path
+private org.apache.hadoop.fs.Path path
 
 
 
@@ -237,7 +237,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 className
-private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String className
+private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String className
 
 
 
@@ -246,7 +246,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 priority
-private int priority
+private int priority
 
 
 
@@ -255,7 +255,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -272,7 +272,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TableCoprocessorAttribute
-public TableCoprocessorAttribute(org.apache.hadoop.fs.Path path,
+public TableCoprocessorAttribute(org.apache.hadoop.fs.Path path,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String className,
  int priority,
  
org.apache.hadoop.conf.Configuration conf)
@@ -292,7 +292,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getPath
-public org.apache.hadoop.fs.Path getPath()
+public org.apache.hadoop.fs.Path getPath()
 
 
 
@@ -301,7 +301,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getClassName
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getClassName()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getClassName()
 
 
 
@@ -310,7 +310,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getPriority
-public int getPriority()
+public int getPriority()
 
 
 
@@ -319,7 +319,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getConf
-public org.apache.hadoop.conf.Configuration getConf()
+public org.apache.hadoop.conf.Configuration getConf()
 
 
 



[07/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
index 985c6c6..a89b54c 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class MemStoreFlusher
+class MemStoreFlusher
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements FlushRequester
 Thread that flushes cache on request
@@ -276,9 +276,10 @@ implements 
 private boolean
-flushRegion(HRegion region,
+flushRegion(HRegion region,
boolean emergencyFlush,
-   boolean forceFlushAllStores)
+   boolean forceFlushAllStores,
+   FlushLifeCycleTracker tracker)
 Flush a region.
 
 
@@ -375,8 +376,9 @@ implements 
 void
-requestFlush(HRegion r,
-boolean forceFlushAllStores)
+requestFlush(HRegion r,
+boolean forceFlushAllStores,
+FlushLifeCycleTracker tracker)
 Tell the listener the cache needs to be flushed.
 
 
@@ -436,7 +438,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -445,7 +447,7 @@ implements 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -454,7 +456,7 @@ implements 
 
 flushQueue
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true";
 title="class or interface in java.util.concurrent">BlockingQueue 
flushQueue
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true";
 title="class or interface in java.util.concurrent">BlockingQueue 
flushQueue
 
 
 
@@ -463,7 +465,7 @@ implements 
 
 regionsInQueue
-private final http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map 
regionsInQueue
+private final http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map 
regionsInQueue
 
 
 
@@ -472,7 +474,7 @@ implements 
 
 wakeupPending
-private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean wakeupPending
+private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean wakeupPending
 
 
 
@@ -481,7 +483,7 @@ implements 
 
 threadWakeFrequency
-private final long threadWakeFrequency
+private final long threadWakeFrequency
 
 
 
@@ -490,7 +492,7 @@ implements 
 
 server
-private final HRegionServer server
+private final HRegionServer server
 
 
 
@@ -499,7 +501,7 @@ implements 
 
 lock
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock lock
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock lock
 
 
 
@@ -508,7 +510,7 @@ implements 
 
 blockSignal
-private final http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object blockSignal
+private final http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object blockSignal
 
 
 
@@ -517,7 +519,7 @@ implements 
 
 blockingWaitTime
-private long blockingWaitTime
+private long blockingWaitTime
 
 
 
@@ -526,7 +528,7 @@ implements 
 
 updatesBlockedMsHighWater
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">LongAdder updatesBlockedMsHighWater
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">LongAdder updatesBlockedMsHighWater
 
 
 
@@ -535,7 +537,7 @@ implements 
 
 flushHandlers
-private final MemStoreFlusher.FlushHandler[] flushHandlers
+private 

[18/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/monitoring/class-use/MonitoredTask.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/monitoring/class-use/MonitoredTask.html 
b/devapidocs/org/apache/hadoop/hbase/monitoring/class-use/MonitoredTask.html
index 3f20bd2..d5d4afc 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/class-use/MonitoredTask.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/class-use/MonitoredTask.html
@@ -267,10 +267,11 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultMobStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
+DefaultMobStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
  long cacheFlushId,
  MonitoredTask status,
- ThroughputController throughputController)
+ ThroughputController throughputController,
+ FlushLifeCycleTracker tracker)
 Flushes the snapshot of the MemStore.
 
 
@@ -489,10 +490,11 @@
 
 
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-HStore.flushCache(long logCacheFlushId,
+HStore.flushCache(long logCacheFlushId,
   MemStoreSnapshot snapshot,
   MonitoredTask status,
-  ThroughputController throughputController)
+  ThroughputController throughputController,
+  FlushLifeCycleTracker tracker)
 Write out current snapshot.
 
 
@@ -511,26 +513,29 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StripeStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
+StripeStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
  long cacheFlushSeqNum,
  MonitoredTask status,
- ThroughputController throughputController) 
+ ThroughputController throughputController,
+ FlushLifeCycleTracker tracker) 
 
 
 abstract http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-StoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
+StoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
  long cacheFlushSeqNum,
  MonitoredTask status,
- ThroughputController throughputController)
+ ThroughputController throughputController,
+ FlushLifeCycleTracker tracker)
 Turns a snapshot of memstore into a set of store 
files.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
-DefaultStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
+DefaultStoreFlusher.flushSnapshot(MemStoreSnapshot snapshot,
  long cacheFlushId,
  MonitoredTask status,
- ThroughputController throughputController) 
+ ThroughputController throughputController,
+ FlushLifeCycleTracker tracker) 
 
 
 private long
@@ -546,9 +551,10 @@
 
 
 private HRegion.FlushResultImpl
-HRegion.internalFlushcache(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection storesToFlush,
+HRegion.internalFlushcache(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection storesToFlush,
   MonitoredTask status,
-  boolean writeFlushWalMarker)
+  boolean writeFlushWalMarker,
+  FlushLifeCycleTracker tracker)
 Flushing given stores.
 
 
@@ -560,11 +566,12 @@
 
 
 protected HRegion.FlushResultImpl
-HRegion.internalFlushcache(WAL wal,
+HRegion.internalFlushcache(WAL wal,
   long myseqid,
   http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection storesToFlush,
   MonitoredTask status,
-  boolean writeFlushWalMarker)
+  boolean writeFlushWalMarker,
+  FlushLifeCycleTracker tracker)
 Flush the memstore.
 
 
@@ -577,11 +584,12 @@
 
 
 protected HRegion.PrepareFlushResult
-HRegion.internalPrepareFlushCache(WAL wal,
+HRegion.internalPrepareFlushCache(WAL wal,
  long myseqid,
  http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection storesToFlush,
  MonitoredTask status,
- boolean writeFlushW

[01/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site d7a614fb4 -> 21726f5a2


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
index 69d7e8a..17ff857 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
@@ -114,12 +114,12 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class SplitLogWorker
+public class SplitLogWorker
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
 This worker is spawned in every regionserver, including 
master. The Worker waits for log
- splitting tasks to be put up by the SplitLogManager 
- running in the master and races with other workers in other serves to acquire 
those tasks. 
+ splitting tasks to be put up by the SplitLogManager
+ running in the master and races with other workers in other serves to acquire 
those tasks.
  The coordination is done via coordination engine.
  
  If a worker has successfully moved the task from state UNASSIGNED to OWNED 
then it owns the task.
@@ -290,7 +290,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -299,7 +299,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 worker
-http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread worker
+http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread worker
 
 
 
@@ -308,7 +308,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 coordination
-private SplitLogWorkerCoordination coordination
+private SplitLogWorkerCoordination coordination
 
 
 
@@ -317,7 +317,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -326,7 +326,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 server
-private RegionServerServices server
+private RegionServerServices server
 
 
 
@@ -343,7 +343,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 SplitLogWorker
-public SplitLogWorker(Server hserver,
+public SplitLogWorker(Server hserver,
   org.apache.hadoop.conf.Configuration conf,
   RegionServerServices server,
   SplitLogWorker.TaskExecutor splitTaskExecutor)
@@ -355,7 +355,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 SplitLogWorker
-public SplitLogWorker(Server hserver,
+public SplitLogWorker(Server hserver,
   org.apache.hadoop.conf.Configuration conf,
   RegionServerServices server,
   LastSequenceId sequenceIdChecker,
@@ -376,7 +376,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 run
-public void run()
+public void run()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--";
 title="class or interface in java.lang">run in 
interface http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
@@ -389,7 +389,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 stopTask
-public void stopTask()
+public void stopTask()
 If the worker is doing a task i.e. splitting a log file 
then stop the task.
  It doesn't exit the worker thread.
 
@@ -400,7 +400,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 start
-public void start()
+public void start()
 start the SplitLogWorker thread
 
 
@@ -410,7 +410,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 stop
-public void stop()
+public void stop()
 stop the SplitLogWorker thread
 
 
@@ -420,7 +420,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 getTaskReadySeq
-public int getTaskReadySeq()
+public int getTaskReadySeq()
 Returns the number of tasks processed by coordination.
  This method is used by tests only
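
The class comment above describes workers racing to move a log-splitting task from UNASSIGNED to OWNED, with the winner owning the task. The sketch below only illustrates that claim race conceptually; HBase performs it through ZooKeeper-based coordination rather than an in-memory AtomicReference, and the types here are purely illustrative.

import java.util.concurrent.atomic.AtomicReference;

final class TaskClaimSketch {
  enum State { UNASSIGNED, OWNED, DONE }

  static final class Task {
    final AtomicReference<State> state = new AtomicReference<>(State.UNASSIGNED);
  }

  // Returns true only for the single worker that wins the UNASSIGNED -> OWNED race.
  static boolean tryClaim(Task task) {
    return task.state.compareAndSet(State.UNASSIGNED, State.OWNED);
  }

  private TaskClaimSketch() {
  }
}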
 



[20/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
 
b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
index 7f7d0d8..30153a5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static enum SplitLogManager.ResubmitDirective
+public static enum SplitLogManager.ResubmitDirective
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum
 
 
@@ -210,7 +210,7 @@ the order they are declared.
 
 
 CHECK
-public static final SplitLogManager.ResubmitDirective CHECK
+public static final SplitLogManager.ResubmitDirective CHECK
 
 
 
@@ -219,7 +219,7 @@ the order they are declared.
 
 
 FORCE
-public static final SplitLogManager.ResubmitDirective FORCE
+public static final SplitLogManager.ResubmitDirective FORCE
 
 
 
@@ -236,7 +236,7 @@ the order they are declared.
 
 
 values
-public static SplitLogManager.ResubmitDirective[] values()
+public static SplitLogManager.ResubmitDirective[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -256,7 +256,7 @@ for (SplitLogManager.ResubmitDirective c : 
SplitLogManager.ResubmitDirective.val
 
 
 valueOf
-public static SplitLogManager.ResubmitDirective valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static SplitLogManager.ResubmitDirective valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html 
b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
index 8578f20..f875016 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public static class SplitLogManager.Task
+public static class SplitLogManager.Task
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 in memory state of an active task.
 
@@ -253,7 +253,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 last_update
-public volatile long last_update
+public volatile long last_update
 
 
 
@@ -262,7 +262,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 last_version
-public volatile int last_version
+public volatile int last_version
 
 
 
@@ -271,7 +271,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 cur_worker_name
-public volatile ServerName cur_worker_name
+public volatile ServerName cur_worker_name
 
 
 
@@ -280,7 +280,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 batch
-public volatile SplitLogManager.TaskBatch 
batch
+public volatile SplitLogManager.TaskBatch 
batch
 
 
 
@@ -289,7 +289,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 status
-public volatile SplitLogManager.TerminationStatus status
+public volatile SplitLogManager.TerminationStatus status
 
 
 
@@ -298,7 +298,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 incarnation
-public volatile http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger incarnation
+public volatile http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger incarnation
 
 
 
@@ -307,7 +307,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 unforcedResubmits
-public final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/At

[02/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html
index 0dc5650..f586704 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html
@@ -166,7 +166,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 memType 
 
 
-private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMap
+private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMap
 replayEditsPerRegion 
 
 
@@ -305,7 +305,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 globalMemstoreDataSize
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">LongAdder globalMemstoreDataSize
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">LongAdder globalMemstoreDataSize
 
 
 
@@ -314,7 +314,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 globalMemstoreHeapSize
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">LongAdder globalMemstoreHeapSize
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">LongAdder globalMemstoreHeapSize
 
 
 
@@ -323,7 +323,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 replayEditsPerRegion
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMap replayEditsPerRegion
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMap replayEditsPerRegion
 
 
 
@@ -332,7 +332,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 globalMemStoreLimit
-private long globalMemStoreLimit
+private long globalMemStoreLimit
 
 
 
@@ -341,7 +341,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 globalMemStoreLimitLowMarkPercent
-private final float globalMemStoreLimitLowMarkPercent
+private final float globalMemStoreLimitLowMarkPercent
 
 
 
@@ -350,7 +350,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 globalMemStoreLimitLowMark
-private long globalMemStoreLimitLowMark
+private long globalMemStoreLimitLowMark
 
 
 
@@ -359,7 +359,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 memType
-private final http://docs.oracle.com/javase/8/docs/api/java/lang/management/MemoryType.html?is-external=true";
 title="class or interface in java.lang.management">MemoryType memType
+private final http://docs.oracle.com/javase/8/docs/api/java/lang/management/MemoryType.html?is-external=true";
 title="class or interface in java.lang.management">MemoryType memType
 
 
 
@@ -368,7 +368,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 globalOnHeapMemstoreLimit
-private long globalOnHeapMemstoreLimit
+private long globalOnHeapMemstoreLimit
 
 
 
@@ -377,7 +377,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 globalOnHeapMemstoreLimitLowMark
-private long globalOnHeapMemstoreLimitLowMark
+private long globalOnHeapMemstoreLimitLowMark
 
 
 
@@ -394,7 +394,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 RegionServerAccounting
-public RegionServerAccounting(org.apache.hadoop.conf.Configuration conf)
+public RegionServerAccounting(org.apache.hadoop.conf.Configuration conf)
 
 
 
@@ -411,7 +411,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getGlobalMemStoreLimit
-long getGlobalMemStoreLimit()
+long getGlobalMemStoreLimit()
 
 
 
@@ -420,7 +420,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getGlobalOnHeapMemStoreLimit
-long getGlobalOnHeapMemStoreLimit()
+long getGlobalOnHeapMemStoreLimit()
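The fields and getters above follow a simple accounting pattern: per-write deltas accumulate into LongAdder counters and are compared against a configured limit and its low-water mark. A stripped-down sketch of that idea only, not the real RegionServerAccounting (which also tracks heap/off-heap sizes, replay edits per region and the memory type):

import java.util.concurrent.atomic.LongAdder;

public class MemStoreAccountingSketch {
  private final LongAdder globalMemStoreDataSize = new LongAdder();
  private final long globalMemStoreLimit;         // hard limit, bytes
  private final long globalMemStoreLimitLowMark;  // lower watermark, bytes

  public MemStoreAccountingSketch(long limitBytes, float lowMarkPercent) {
    this.globalMemStoreLimit = limitBytes;
    this.globalMemStoreLimitLowMark = (long) (limitBytes * lowMarkPercent);
  }

  /** Record bytes added to (or, with a negative delta, removed from) the memstores. */
  public void incMemStoreSize(long delta) {
    globalMemStoreDataSize.add(delta);
  }

  /** Above the hard limit: writes should be held back until flushes catch up. */
  public boolean isAboveHighWaterMark() {
    return globalMemStoreDataSize.sum() > globalMemStoreLimit;
  }

  /** Above the low-water mark: a flush should be triggered. */
  public boolean isAboveLowWaterMark() {
    return globalMemStoreDataSize.sum() > globalMemStoreLimitLowMark;
  }
}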

[23/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListBase.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListBase.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListBase.html
new file mode 100644
index 000..712721b
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListBase.html
@@ -0,0 +1,191 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+Uses of Class org.apache.hadoop.hbase.filter.FilterListBase (Apache 
HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+
+Uses of 
Classorg.apache.hadoop.hbase.filter.FilterListBase
+
+
+
+
+
+Packages that use FilterListBase 
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.filter
+
+Provides row-level filters applied to HRegion scan results 
during calls to
+ ResultScanner.next().
+
+
+
+
+
+
+
+
+
+
+Uses of FilterListBase in org.apache.hadoop.hbase.filter
+
+Subclasses of FilterListBase in org.apache.hadoop.hbase.filter 
+
+Modifier and Type
+Class and Description
+
+
+
+class 
+FilterListWithAND
+FilterListWithAND represents an ordered list of filters 
which will be evaluated with an AND
+ operator.
+
+
+
+class 
+FilterListWithOR
+FilterListWithOR represents an ordered list of filters 
which will be evaluated with an OR
+ operator.
+
+
+
+
+
+Fields in org.apache.hadoop.hbase.filter
 declared as FilterListBase 
+
+Modifier and Type
+Field and Description
+
+
+
+private FilterListBase
+FilterList.filterListBase 
+
+
+
+
+
+
+
+
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListWithAND.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListWithAND.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListWithAND.html
new file mode 100644
index 000..cae5bb1
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListWithAND.html
@@ -0,0 +1,125 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+Uses of Class org.apache.hadoop.hbase.filter.FilterListWithAND (Apache 
HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+Uses of 
Classorg.apache.hadoop.hbase.filter.FilterListWithAND
+
+No usage of 
org.apache.hadoop.hbase.filter.FilterListWithAND
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/class-use/FilterListWithOR.html

[14/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index f014424..32cc2f6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class HRegion.RegionScannerImpl
+class HRegion.RegionScannerImpl
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements RegionScanner, RpcCallback
 RegionScannerImpl is used to combine scanners from multiple 
Stores (aka column families).
@@ -425,7 +425,7 @@ implements 
 
 storeHeap
-KeyValueHeap storeHeap
+KeyValueHeap storeHeap
 
 
 
@@ -434,7 +434,7 @@ implements 
 
 joinedHeap
-KeyValueHeap joinedHeap
+KeyValueHeap joinedHeap
 Heap of key-values that are not essential for the provided 
filters and are thus read
  on demand, if on-demand column family loading is enabled.
 
@@ -445,7 +445,7 @@ implements 
 
 joinedContinuationRow
-protected Cell joinedContinuationRow
+protected Cell joinedContinuationRow
 If the joined heap data gathering is interrupted due to 
scan limits, this will
  contain the row for which we are populating the values.
 
@@ -456,7 +456,7 @@ implements 
 
 filterClosed
-private boolean filterClosed
+private boolean filterClosed
 
 
 
@@ -465,7 +465,7 @@ implements 
 
 stopRow
-protected final byte[] stopRow
+protected final byte[] stopRow
 
 
 
@@ -474,7 +474,7 @@ implements 
 
 includeStopRow
-protected final boolean includeStopRow
+protected final boolean includeStopRow
 
 
 
@@ -483,7 +483,7 @@ implements 
 
 region
-protected final HRegion region
+protected final HRegion region
 
 
 
@@ -492,7 +492,7 @@ implements 
 
 comparator
-protected final CellComparator comparator
+protected final CellComparator comparator
 
 
 
@@ -501,7 +501,7 @@ implements 
 
 readPt
-private final long readPt
+private final long readPt
 
 
 
@@ -510,7 +510,7 @@ implements 
 
 maxResultSize
-private final long maxResultSize
+private final long maxResultSize
 
 
 
@@ -519,7 +519,7 @@ implements 
 
 defaultScannerContext
-private final ScannerContext defaultScannerContext
+private final ScannerContext defaultScannerContext
 
 
 
@@ -528,7 +528,7 @@ implements 
 
 filter
-private final FilterWrapper filter
+private final FilterWrapper filter
 
 
 
@@ -545,7 +545,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List additionalScanners,
   HRegion region)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
@@ -561,7 +561,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List additionalScanners,
   HRegion region,
   long nonceGroup,
@@ -587,7 +587,7 @@ implements 
 
 getRegionInfo
-public RegionInfo getRegionInfo()
+public RegionInfo getRegionInfo()
 
 Specified by:
 getRegionInfo in
 interface RegionScanner
@@ -602,7 +602,7 @@ implements 
 
 initializeScanners
-protected void initializeScanners(Scan scan,
+protected void initializeScanners(Scan scan,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List additionalScanners)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
@@ -617,7 +617,7 @@ implements 
 
 initializeKVHeap
-protected void initializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+protected void initializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List joinedScanners,
 HRegion region)
  throws http://docs.oracle
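RegionScannerImpl's role, per the description above, is to merge the already-sorted output of several per-store scanners (the storeHeap and joinedHeap fields; HBase uses KeyValueHeap for this). As a loose, generic illustration of that k-way merge idea only, not HBase's implementation:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

public final class MergeSketch {

  /** Wraps a scanner and its current element so the heap can order scanners by their next value. */
  private static final class PeekingScanner<T> {
    T current;
    final Iterator<T> source;
    PeekingScanner(Iterator<T> source) {
      this.source = source;
      this.current = source.next();
    }
  }

  public static <T> List<T> merge(List<Iterator<T>> scanners, Comparator<T> cmp) {
    PriorityQueue<PeekingScanner<T>> heap =
        new PriorityQueue<>((a, b) -> cmp.compare(a.current, b.current));
    for (Iterator<T> it : scanners) {
      if (it.hasNext()) {
        heap.add(new PeekingScanner<>(it));
      }
    }
    List<T> out = new ArrayList<>();
    while (!heap.isEmpty()) {
      PeekingScanner<T> top = heap.poll();  // scanner whose current value sorts first
      out.add(top.current);
      if (top.source.hasNext()) {
        top.current = top.source.next();    // advance that scanner and re-insert it
        heap.add(top);
      }
    }
    return out;
  }
}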

[19/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
index 7017241..c71b9c3 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
@@ -969,7 +969,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addRegionInTransition
-protected boolean addRegionInTransition(RegionStates.RegionStateNode regionNode,
+protected boolean addRegionInTransition(RegionStates.RegionStateNode regionNode,
 RegionTransitionProcedure procedure)
 
 
@@ -979,7 +979,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 removeRegionInTransition
-protected void removeRegionInTransition(RegionStates.RegionStateNode regionNode,
+protected void removeRegionInTransition(RegionStates.RegionStateNode regionNode,
 RegionTransitionProcedure procedure)
 
 
@@ -989,7 +989,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 hasRegionsInTransition
-public boolean hasRegionsInTransition()
+public boolean hasRegionsInTransition()
 
 
 
@@ -998,7 +998,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 isRegionInTransition
-public boolean isRegionInTransition(RegionInfo regionInfo)
+public boolean isRegionInTransition(RegionInfo regionInfo)
 
 
 
@@ -1007,7 +1007,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionTransitionProcedure
-public RegionTransitionProcedure getRegionTransitionProcedure(RegionInfo hri)
+public RegionTransitionProcedure getRegionTransitionProcedure(RegionInfo hri)
 
 Returns:
 If a procedure-in-transition for hri, return it else 
null.
@@ -1020,7 +1020,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionTransitionState
-public RegionState getRegionTransitionState(RegionInfo hri)
+public RegionState getRegionTransitionState(RegionInfo hri)
 
 
 
@@ -1029,7 +1029,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionsInTransition
-public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getRegionsInTransition()
+public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getRegionsInTransition()
 
 
 
@@ -1038,7 +1038,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionsInTransitionCount
-public int getRegionsInTransitionCount()
+public int getRegionsInTransitionCount()
 Get the number of regions in transition.
 
 
@@ -1048,7 +1048,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionsStateInTransition
-public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getRegionsStateInTransition()
+public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getRegionsStateInTransition()
 
 
 
@@ -1057,7 +1057,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRegionsInTransitionOrderedByTimestamp
-public http://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true";
 title="class or interface in java.util">SortedSet getRegionsInTransitionOrderedByTimestamp()
+public http://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true";
 title="class or interface in java.util">SortedSet getRegionsInTransitionOrderedByTimestamp()
 
 
 
@@ -1066,7 +1066,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addToOfflineRegions
-public void addToOfflineRegions(RegionStates.RegionStateNode regionNode)
+public void addToOfflineRegions(RegionStates.RegionStateNode regionNode)
 
 
 
@@ -1075,7 +1075,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 removeFromOfflineRegions
-public void removeFromOfflineRegions(RegionInfo regionInfo)
+public void removeFromOfflineRegions(RegionInfo regionInfo)
 
 
 
@@ -1084,7 +1084,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addToFailedOpen
-public RegionStates.RegionFailedOpen addToFailedOpen(RegionStates.RegionStateNode regionNode)
+public RegionStates.RegionFailedOpen addToFailedOpen(RegionStates.RegionStateNode regionNode)
 
 
 
@@ -1093,7 +1093,7 @@ 
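The accessors above are simple queries over the master's in-transition bookkeeping. A short hypothetical caller, assuming an already-obtained RegionStates and RegionInfo (package locations follow the pages referenced in this diff):

import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.assignment.RegionStates;
import org.apache.hadoop.hbase.master.assignment.RegionTransitionProcedure;

public class RegionStatesSketch {
  static void report(RegionStates regionStates, RegionInfo regionInfo) {
    if (regionStates.hasRegionsInTransition()) {
      System.out.println("regions in transition: " + regionStates.getRegionsInTransitionCount());
    }
    if (regionStates.isRegionInTransition(regionInfo)) {
      // Returns the procedure-in-transition for this region, or null if there is none.
      RegionTransitionProcedure proc = regionStates.getRegionTransitionProcedure(regionInfo);
      System.out.println("procedure: " + proc);
    }
  }
}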

[32/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
 
b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
index 92ad329..db455e3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
@@ -83,22 +83,50 @@
 
 
 
-org.apache.hadoop.hbase.coordination
+org.apache.hadoop.hbase
  
 
 
-org.apache.hadoop.hbase.regionserver
+org.apache.hadoop.hbase.coordination
  
 
 
+org.apache.hadoop.hbase.regionserver
+ 
+
+
 org.apache.hadoop.hbase.regionserver.handler
  
 
+
+org.apache.hadoop.hbase.wal
+ 
+
 
 
 
 
 
+
+
+
+Uses of SplitLogWorkerCoordination in org.apache.hadoop.hbase
+
+Methods in org.apache.hadoop.hbase
 that return SplitLogWorkerCoordination 
+
+Modifier and Type
+Method and Description
+
+
+
+SplitLogWorkerCoordination
+CoordinatedStateManager.getSplitLogWorkerCoordination()
+Method to retrieve coordination for split log worker
+
+
+
+
+
 
 
 
@@ -140,12 +168,6 @@
 
 
 
-abstract SplitLogWorkerCoordination
-BaseCoordinatedStateManager.getSplitLogWorkerCoordination()
-Method to retrieve coordination for split log worker
-
-
-
 SplitLogWorkerCoordination
 ZkCoordinatedStateManager.getSplitLogWorkerCoordination() 
 
@@ -205,6 +227,66 @@
 
 
 
+
+
+
+Uses of SplitLogWorkerCoordination in org.apache.hadoop.hbase.wal
+
+Fields in org.apache.hadoop.hbase.wal
 declared as SplitLogWorkerCoordination 
+
+Modifier and Type
+Field and Description
+
+
+
+private SplitLogWorkerCoordination
+WALSplitter.splitLogWorkerCoordination 
+
+
+
+
+Methods in org.apache.hadoop.hbase.wal
 with parameters of type SplitLogWorkerCoordination 
+
+Modifier and Type
+Method and Description
+
+
+
+static boolean
+WALSplitter.splitLogFile(org.apache.hadoop.fs.Path rootDir,
+org.apache.hadoop.fs.FileStatus logfile,
+org.apache.hadoop.fs.FileSystem fs,
+org.apache.hadoop.conf.Configuration conf,
+CancelableProgressable reporter,
+LastSequenceId idChecker,
+SplitLogWorkerCoordination splitLogWorkerCoordination,
+Connection connection,
+
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode mode,
+WALFactory factory)
+Splits a WAL file into region's recovered-edits 
directory.
+
+
+
+
+
+Constructors in org.apache.hadoop.hbase.wal
 with parameters of type SplitLogWorkerCoordination 
+
+Constructor and Description
+
+
+
+WALSplitter(WALFactory factory,
+   org.apache.hadoop.conf.Configuration conf,
+   org.apache.hadoop.fs.Path rootDir,
+   org.apache.hadoop.fs.FileSystem fs,
+   LastSequenceId idChecker,
+   SplitLogWorkerCoordination splitLogWorkerCoordination,
+   Connection connection,
+   
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode mode) 
+
+
+
+
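The new rows above show the coordination object now being obtained from CoordinatedStateManager and threaded into WALSplitter.splitLogFile(...) and the WALSplitter constructor. A minimal hedged sketch of the first step only (imports follow the packages shown in these tables; the remaining splitLogFile arguments are omitted here):

import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;

public class SplitLogWiringSketch {
  /** Fetch the split-log-worker coordination; callers then pass it to WALSplitter. */
  static SplitLogWorkerCoordination coordinationOf(CoordinatedStateManager csm) {
    return csm.getSplitLogWorkerCoordination();
  }
}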
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/ZkCoordinatedStateManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/ZkCoordinatedStateManager.html
 
b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/ZkCoordinatedStateManager.html
index 97e74dc..aa610f3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/ZkCoordinatedStateManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/ZkCoordinatedStateManager.html
@@ -72,59 +72,7 @@
 
 Uses of 
Classorg.apache.hadoop.hbase.coordination.ZkCoordinatedStateManager
 
-
-
-
-
-Packages that use ZkCoordinatedStateManager 
-
-Package
-Description
-
-
-
-org.apache.hadoop.hbase.coordination
- 
-
-
-
-
-
-
-
-
-
-Uses of ZkCoordinatedStateManager in org.apache.hadoop.hbase.coordination
-
-Fields in org.apache.hadoop.hbase.coordination
 declared as ZkCoordinatedStateManager 
-
-Modifier and Type
-Field and Description
-
-
-
-private ZkCoordinatedStateManager
-ZkSplitLogWorkerCoordination.manager 
-
-
-
-
-Constructors in org.apache.hadoop.hbase.coordination
 with parameters of type ZkCoordinatedStateManager 
-
-Constructor and Description
-
-
-
-ZkSplitLogWorkerCoordination(ZkCoordinatedStateManager zkCoordinatedStateManager,
-ZooKeeperWatcher watcher) 
-
-
-
-
-
-
-
-
+No usage of 
org.apache.hadoop.hbase.coordination.ZkCoordinatedStateManager
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/coordination/package-frame.html
---

[24/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 1c65a36..606d5fb 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -127,6 +127,21 @@
 
 
 
+
+Fields in org.apache.hadoop.hbase.filter
 with type parameters of type Filter.ReturnCode 
+
+Modifier and Type
+Field and Description
+
+
+
+private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+FilterListWithOR.prevFilterRCList
+Save previous return code and previous cell for every 
filter in filter list.
+
+
+
+
 
 Methods in org.apache.hadoop.hbase.filter
 that return Filter.ReturnCode 
 
@@ -152,111 +167,153 @@
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v) 
+FilterListBase.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FilterWrapper.filterKeyValue(Cell v) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+FilterWrapper.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored) 
+ColumnPrefixFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell v) 
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+RowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v) 
+ColumnRangeFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c) 
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v) 
+DependentColumnFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+InclusiveStopFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
+Filter.ReturnCode
+MultiRowRangeFilter.filterKeyValue(Cell ignored) 
+
+
 abstract Filter.ReturnCode
 Filter.filterKeyValue(Cell v)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
-
+
 Filter.ReturnCode
 WhileMatchFilter.filterKeyValue(Cell v) 
 
-
+
 Filter.ReturnCode
 FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
 Deprecated. 
  
 
-
+
 Filter.ReturnCode
 TimestampsFilter.filterKeyValue(Cell v) 
 
-
+
 Filter.ReturnCode
 FuzzyRowFilter.filterKeyValue(Cell c) 
 
-
+
 Filter.ReturnCode
 FilterList.filterKeyValue(Cell c) 
 
-
+
 Filter.ReturnCode
 RandomRowFilter.filterKeyValue(Cell v) 
 
-
+
 Filter.ReturnCode
 PrefixFilter.filterKeyValue(Cell v) 
 
-
+
 Filter.ReturnCode
 SingleColumnValueFilter.filterKeyValue(Cell c) 
 
-
+
 Filter.ReturnCode
 QualifierFilter.filterKeyValue(Cell v) 
 
+
+(package private) Filter.ReturnCode
+FilterListWithAND.internalFilterKeyValue(Cell c,
+  Cell transformedCell) 
+
 
+(package private) abstract Filter.ReturnCode
+FilterListBase.internalFilterKeyValue(Cell c,
+  Cell transformedCell)
+Internal implementation of FilterListBase.filterKeyValue(Cell)
+
+
+
+(package private) Filter.ReturnCode
+FilterListWithOR.internalFilterKeyValue(Cell c,
+  Cell transformCell) 
+
+
+(package private) Filter.ReturnCode
+FilterList.internalFilterKeyValue(Cell c,
+  Cell transformedCell)
+Internal implementation of FilterList.filterKeyValue(Cell).
+
+
+
+private Filter.ReturnCode
+FilterListWithAND.mergeReturnCode(Filter.ReturnCode rc,
+   Filter.ReturnCode localRC)
+FilterList with MUST_PASS_ALL choose the maximal forward 
step among sub-filters in filter list.
+
+
+
+private Filter.ReturnCode
+FilterListWithOR.mergeReturnCode(Filter.ReturnCode rc,
+   Filter.ReturnCode localRC)
+FilterList with MUST_PASS_ONE choose the minimal forward 
step among sub-filter in filter list.
+
+
+
 static Filter.ReturnCode
 Filter.ReturnCode.valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 
 
-
+
 static Filter.ReturnCode[]
 Filter.ReturnCode.values()
 Returns an array containing the constants of this
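The two mergeReturnCode entries above capture the core rule: with MUST_PASS_ALL the list takes the largest forward step any sub-filter asks for, with MUST_PASS_ONE the smallest. A deliberately simplified sketch of that ordering; the step weights and the subset of constants used here are illustrative, and the real methods handle more return codes and special cases:

import java.util.EnumMap;
import java.util.Map;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;

public class MergeReturnCodeSketch {
  // Illustrative "forward step" ordering; larger means skipping more data. Only a
  // subset of the ReturnCode constants is seeded for this example.
  private static final Map<ReturnCode, Integer> STEP = new EnumMap<>(ReturnCode.class);
  static {
    STEP.put(ReturnCode.INCLUDE, 0);
    STEP.put(ReturnCode.SKIP, 1);
    STEP.put(ReturnCode.NEXT_COL, 2);
    STEP.put(ReturnCode.NEXT_ROW, 3);
  }

  /** MUST_PASS_ALL: every filter must agree, so take the maximal forward step. */
  static ReturnCode mergeAnd(ReturnCode rc, ReturnCode localRC) {
    return STEP.get(localRC) > STEP.get(rc) ? localRC : rc;
  }

  /** MUST_PASS_ONE: one passing filter is enough, so take the minimal forward step. */
  static ReturnCode mergeOr(ReturnCode rc, ReturnCode localRC) {
    return STEP.get(localRC) < STEP.get(rc) ? localRC : rc;
  }
}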

[28/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.Operator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.Operator.html 
b/devapidocs/org/apache/hadoop/hbase/filter/FilterList.Operator.html
index f277088..3f5942a 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.Operator.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FilterList.Operator.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public static enum FilterList.Operator
+public static enum FilterList.Operator
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum
 set operator
 
@@ -216,7 +216,7 @@ the order they are declared.
 
 
 MUST_PASS_ALL
-public static final FilterList.Operator MUST_PASS_ALL
+public static final FilterList.Operator MUST_PASS_ALL
 !AND
 
 
@@ -226,7 +226,7 @@ the order they are declared.
 
 
 MUST_PASS_ONE
-public static final FilterList.Operator MUST_PASS_ONE
+public static final FilterList.Operator MUST_PASS_ONE
 !OR
 
 
@@ -244,7 +244,7 @@ the order they are declared.
 
 
 values
-public static FilterList.Operator[] values()
+public static FilterList.Operator[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -264,7 +264,7 @@ for (FilterList.Operator c : FilterList.Operator.values())
 
 
 valueOf
-public static FilterList.Operator valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static FilterList.Operator valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
@@ -309,7 +309,7 @@ not permitted.)
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html 
b/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
index 8164e0d..84bb3e1 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":9,"i14":10,"i15":10,"i16":10,"i17":9,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":9,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,22 +120,15 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public final class FilterList
+public final class FilterList
 extends FilterBase
-Implementation of Filter that represents an 
ordered List of Filters
- which will be evaluated with a specified boolean operator FilterList.Operator.MUST_PASS_ALL
- (AND) or FilterList.Operator.MUST_PASS_ONE
 (OR).
- Since you can use Filter Lists as children of Filter Lists, you can create a
- hierarchy of filters to be evaluated.
-
- 
- FilterList.Operator.MUST_PASS_ALL
 evaluates lazily: evaluation stops as soon as one filter does
- not include the KeyValue.
-
- 
- FilterList.Operator.MUST_PASS_ONE
 evaluates non-lazily: all filters are always evaluated.
-
- 
+Implementation of Filter that represents an 
ordered List of Filters which will be
+ evaluated with a specified boolean operator FilterList.Operator.MUST_PASS_ALL
 (AND) or
+ FilterList.Operator.MUST_PASS_ONE
 (OR). Since you can use Filter Lists as children of
+ Filter Lists, you can create a hierarchy of filters to be evaluated. 
+ FilterList.Operator.MUST_PASS_ALL
 evaluates lazily: evaluation stops as soon as one filter does not
+ include the KeyValue. 
+ FilterList.Operator.MUST_PASS_ONE
 evaluates non-lazily: all filters are always evaluated. 
  Defaults to FilterList.Operator.MUST_PASS_ALL.
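The reflowed description above is easiest to see from client code. A small usage sketch of the nesting it describes (the row prefixes and page size are placeholders):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListExample {
  public static Scan buildScan() {
    // MUST_PASS_ONE (OR): a cell passes if either prefix filter accepts it.
    FilterList either = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    either.addFilter(new PrefixFilter(Bytes.toBytes("user-")));
    either.addFilter(new PrefixFilter(Bytes.toBytes("admin-")));

    // MUST_PASS_ALL (AND, the default): every child must accept the cell.
    FilterList all = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    all.addFilter(either);               // filter lists can nest
    all.addFilter(new KeyOnlyFilter());  // return keys only, dropping values
    all.addFilter(new PageFilter(25));   // cap the number of rows returned

    return new Scan().setFilter(all);
  }
}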
 
 
@@ -184,34 +177,13 @@ extends Field and Description
 
 
-private http://docs

[41/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 6372b10..0327563 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -1668,1070 +1668,1063 @@
 0.2000298023224f
 
 
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-HBASE_COORDINATED_STATE_MANAGER_CLASS
-"hbase.coordinated.state.manager.class"
-
-
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_DIR
 "hbase.rootdir"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_BALANCER_MAX_RIT_PERCENT
 "hbase.master.balancer.maxRitPercent"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_LOADBALANCE_BYTABLE
 "hbase.master.loadbalance.bytable"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_LOADBALANCER_CLASS
 "hbase.master.loadbalancer.class"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_LOGCLEANER_PLUGINS
 "hbase.master.logcleaner.plugins"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_NORMALIZER_CLASS
 "hbase.master.normalizer.class"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_META_BLOCK_SIZE
 "hbase.meta.blocksize"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_META_SCANNER_CACHING
 "hbase.meta.scanner.caching"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_META_VERSIONS
 "hbase.meta.versions"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_REGION_SPLIT_POLICY_KEY
 "hbase.regionserver.region.split.policy"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_REGIONSERVER_LEASE_PERIOD_KEY
 "hbase.regionserver.lease.period"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_READ_TIMEOUT_KEY
 "hbase.rpc.read.timeout"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY
 "hbase.rpc.shortoperation.timeout"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_TIMEOUT_KEY
 "hbase.rpc.timeout"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_WRITE_TIMEOUT_KEY
 "hbase.rpc.write.timeout"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RS_NONCES_ENABLED
 "hbase.regionserver.nonces.enabled"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_SERVER_PAUSE
 "hbase.server.pause"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY
 "hbase.server.scanner.max.result.size"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_SPLITLOG_MANAGER_TIMEOUT
 "hbase.splitlog.manager.timeout"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lan

[08/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html
index f4bb834..6bf7f94 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -420,7 +420,7 @@ implements 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.html
deleted file mode 100644
index 76ebae8..000
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableOnlineRegions.html
+++ /dev/null
@@ -1,291 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-ImmutableOnlineRegions (Apache HBase 3.0.0-SNAPSHOT API)
-
-
-
-
-
-var methods = {"i0":6,"i1":6,"i2":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-org.apache.hadoop.hbase.regionserver
-Interface 
ImmutableOnlineRegions
-
-
-
-
-
-
-All Known Subinterfaces:
-OnlineRegions, RegionServerServices
-
-
-All Known Implementing Classes:
-HMaster, HMasterCommandLine.LocalHMaster, HRegionServer
-
-
-
-@InterfaceAudience.LimitedPrivate(value="Coprocesssor")
- @InterfaceStability.Evolving
-public interface ImmutableOnlineRegions
-Interface to Map of online regions.  In the  Map, the key 
is the region's
- encoded name and the value is an Region instance.
-
-
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All Methods Instance Methods Abstract Methods 
-
-Modifier and Type
-Method and Description
-
-
-Region
-getRegion(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String encodedRegionName)
-Return Region instance.
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-getRegions()
-Get all online regions in this RS.
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-getRegions(TableName tableName)
-Get all online regions of a table in this RS.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-Method Detail
-
-
-
-
-
-getRegion
-Region getRegion(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String encodedRegionName)
-Return Region instance.
- Only works if caller is in same context, in same JVM. Region is not
- serializable.
-
-Parameters:
-encodedRegionName - 
-Returns:
-Region for the passed encoded encodedRegionName or
- null if named region is not member of the online regions.
-
-
-
-
-
-
-
-
-getRegions
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getRegions(TableName tableName)
-   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
-Get all online regions of a table in this RS.
-
-Parameters:
-tableName - 
-Returns:
-List of Region
-Throws:
-http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
-
-
-
-
-
-
-
-
-getRegions
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 

[40/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 1d77008..6aee8ec 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1076,31 +1076,31 @@
  
 add(ProcedureProtos.Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALFormatReader.WalProcedureMap
  
-add(Iterable,
 MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.AbstractMemStore
+add(Iterable,
 MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.AbstractMemStore
  
-add(Cell,
 MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.AbstractMemStore
+add(Cell,
 MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.AbstractMemStore
  
 add(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.CellFlatMap.CellFlatMapCollection
  
 add(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.CellSet
  
-add(Cell,
 MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.HStore
+add(Cell,
 MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.HStore
 
 Adds a value to the memstore
 
-add(Iterable,
 MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.HStore
+add(Iterable,
 MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.HStore
 
 Adds the specified value to the memstore
 
-add(Cell,
 MemStoreSize) - Method in interface 
org.apache.hadoop.hbase.regionserver.MemStore
+add(Cell,
 MemStoreSizing) - Method in interface 
org.apache.hadoop.hbase.regionserver.MemStore
 
 Write an update
 
-add(Iterable,
 MemStoreSize) - Method in interface 
org.apache.hadoop.hbase.regionserver.MemStore
+add(Iterable,
 MemStoreSizing) - Method in interface 
org.apache.hadoop.hbase.regionserver.MemStore
 
 Write the updates
 
-add(Cell,
 boolean, MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.MutableSegment
+add(Cell,
 boolean, MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.MutableSegment
 
 Adds the given cell into the segment
 
@@ -1684,6 +1684,12 @@
 
 Add a Filter to be instantiated on import
 
+addFilterLists(List)
 - Method in class org.apache.hadoop.hbase.filter.FilterListBase
+ 
+addFilterLists(List)
 - Method in class org.apache.hadoop.hbase.filter.FilterListWithAND
+ 
+addFilterLists(List)
 - Method in class org.apache.hadoop.hbase.filter.FilterListWithOR
+ 
 addFilterPathMapping(String,
 WebAppContext) - Method in class org.apache.hadoop.hbase.http.HttpServer
 
 Add the path spec to the filter path mapping.
@@ -2056,7 +2062,7 @@
  
 addRegion(HRegion)
 - Method in class org.apache.hadoop.hbase.regionserver.HRegionServer
  
-addRegion(HRegion)
 - Method in interface org.apache.hadoop.hbase.regionserver.OnlineRegions
+addRegion(HRegion)
 - Method in interface org.apache.hadoop.hbase.regionserver.MutableOnlineRegions
 
 Add to online regions.
 
@@ -2649,6 +2655,10 @@
 
 Called after compaction is executed by 
CompactSplitThread.
 
+afterExecution()
 - Method in interface org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker
+
+Called after flush is executed.
+
 afterLast
 - Variable in class org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArrayScanner
  
 afterReplay(TEnvironment)
 - Method in class org.apache.hadoop.hbase.procedure2.Procedure
@@ -3274,11 +3284,11 @@
  choose CompactSelection from candidates --
  First exclude bulk-load files if indicated in configuration.
 
-applyFamilyMapToMemStore(Map>, MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.HRegion
+applyFamilyMapToMemStore(Map>, MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.HRegion
  
-applyToMemStore(HStore,
 List, boolean, MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.HRegion
+applyToMemStore(HStore,
 List, boolean, MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.HRegion
  
-applyToMemStore(HStore,
 Cell, MemStoreSize) - Method in class 
org.apache.hadoop.hbase.regionserver.HRegion
+applyToMemStore(HStore,
 Cell, MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.HRegion
  
 archivedTables
 - Variable in class org.apache.hadoop.hbase.backup.example.HFileArchiveTableMonitor
  
@@ -5338,12 +5348,6 @@
 
 BaseConstraint()
 - Constructor for class org.apache.hadoop.hbase.constraint.BaseConstraint
  
-BaseCoordinatedStateManager - Class in org.apache.hadoop.hbase.coordination
-
-Base class for CoordinatedStateManager 
implementations.
-
-BaseCoordinatedStateManager()
 - Constructor for class org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager
- 
 BaseDecoder - Class in org.apache.hadoop.hbase.codec
 
 TODO javadoc
@@ -5665,6 +5669

[38/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index 4c5d989..232cafc 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -941,80 +941,74 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-HBASE_COORDINATED_STATE_MANAGER_CLASS
-Config for pluggable consensus provider
-
-
-
-static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_DIR
 Parameter name for HBase instance root directory
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_BALANCER_MAX_RIT_PERCENT
 Config for the max percent of regions in transition
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_LOADBALANCE_BYTABLE
 Config for balancing the cluster by table
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_LOADBALANCER_CLASS
 Config for pluggable load balancers
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_LOGCLEANER_PLUGINS 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_MASTER_NORMALIZER_CLASS
 Config for pluggable region normalizer
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_META_BLOCK_SIZE
 Parameter name for number of versions, kept by meta 
table.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_META_SCANNER_CACHING
 Parameter name for number of rows that will be fetched when 
calling next on
  a scanner if it is not served from memory.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_META_VERSIONS
 Parameter name for number of versions, kept by meta 
table.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 HBASE_NON_TABLE_DIRS
 Directories that are not HBase table directories
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 HBASE_NON_USER_TABLE_DIRS
 Directories that are not HBase user table directories
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_REGION_SPLIT_POLICY_KEY 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_REGIONSERVER_LEASE_PERIOD_KEY
 Deprecated. 
@@ -1022,371 +1016,371 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_READ_TIMEOUT_KEY
 timeout for each read RPC
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY
 timeout for short operation RPC
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_TIMEOUT_KEY
 timeout for each RPC
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_WRITE_TIMEOUT_KEY
 timeout for each write RPC
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RS_NONCES_ENABLED
 Whether nonces are enabled; default is true.
 

[22/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 5b1c58c..c50ef11 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable, Abortable, ConfigurationObserver, MasterServices, FavoredNodesForRegion, ImmutableOnlineRegions, LastSequenceId, OnlineRegions, RegionServerServices, Server, Stoppable
+http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable, Abortable, ConfigurationObserver, MasterServices, FavoredNodesForRegion, LastSequenceId, 
MutableOnlineRegions, OnlineRegions, RegionServerServices, Server, Stoppable
 
 
 Direct Known Subclasses:
@@ -475,8 +475,7 @@ implements Constructor and Description
 
 
-HMaster(org.apache.hadoop.conf.Configuration conf,
-   CoordinatedStateManager csm)
+HMaster(org.apache.hadoop.conf.Configuration conf)
 Initializes the HMaster.
 
 
@@ -614,9 +613,8 @@ implements 
 static HMaster
-constructMaster(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class masterClass,
-   org.apache.hadoop.conf.Configuration conf,
-   CoordinatedStateManager cp)
+constructMaster(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true";
 title="class or interface in java.lang">Class masterClass,
+   org.apache.hadoop.conf.Configuration conf)
 Utility for constructing an instance of the passed HMaster 
class.
 
 
@@ -1363,7 +1361,7 @@
Methods inherited from class org.apache.hadoop.hbase.regionserver.HRegionServer:
abort, addRegion, addToMovedRegions, checkFileSystem, cleanMovedRegions, closeAllRegions,
closeAndOfflineRegionForSplitOrMerge, closeRegion, constructRegionServer, convertThrowableToIOE,
createClusterConnection, createRegionLoad, createRegionServerStatusStub, createRegionServerStatusStub,
execRegionServerService, getCacheConfig, getChoreService, getClusterConnection, getClusterId,
getCompactionPressure, getCompactionRequestor, getCompactSplitThread, getConfiguration,
getConfigurationManager, getConnection, getCoordinatedStateManager, getEventLoopGroupConfig,
getExecutorService, getFavoredNodesForRegion, getFileSystem, getFlushPressure, getFlushRequester,
getFlushThroughputController, getFsTableDescriptors, getHeapMemoryManager, getInfoServer,
getLastSequenceId, getLeases, getMasterAddressTracker, getMetaTableLocator, getMetrics,
getMostLoadedRegions, getNonceManager, getNumberOfOnlineRegions, getOnlineRegion,
getOnlineRegionsLocalContext, getOnlineTables, getRecoveringRegions, getRegion, getRegion,
getRegionBlockLocations, getRegionByEncodedName, getRegionByEncodedName, getRegions, getRegions,
getRegionServerAccounting, getRegionServerCoprocessorHost, getRegionServerCoprocessors,
getRegionServerMetrics, getRegionServerRpcQuotaManager, getRegionServerSpaceQuotaManager,
getRegionsInTransitionInRS, getReplicationSourceService, getRootDir, getRpcServer, getRSRpcServices,
getSecureBulkLoadManager, getStartcode, getThreadWakeFrequency, getWAL, getWALFileSystem,
getWalRoller, getWALRootDir, getWALs, handleReportForDutyResponse, initializeMemStoreChunkCreator,
isAborted, isOnline, isStopped, isStopping, kill, movedRegionCleanerPeriod, onConfigurationChange,
postOpenDeployTasks, regionLock, removeRegion, reportRegionSizesForQuotas,
reportRegionStateTransition, sendShutdownInterrupt, setInitLatch, setupClusterConnection,
shouldUseThisHostnameInstead, stop, stop, toString, tryRegionServerReport, unassign,
updateConfiguration, updateRegionFavoredNodesMapping, waitForServerOnline, walRollRequestFinished
(The + side of this hunk repeats the same inherited-method list and is cut off where this partial commit message ends.)

[17/51] [partial] hbase-site git commit: Published site at .

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index ca99df6..5dbfc59 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -207,11 +207,11 @@
java.lang.Enum (implements java.lang.Comparable, java.io.Serializable)

 org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
-org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
-org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.QuotaScope
 org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
+org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
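For orientation only, a tiny sketch that enumerates the top-level quota enums named in this tree. The class name is hypothetical; the sketch relies only on the enum class names listed above and the values() method every Java enum provides.

import org.apache.hadoop.hbase.quotas.QuotaScope;
import org.apache.hadoop.hbase.quotas.QuotaType;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;
import org.apache.hadoop.hbase.quotas.ThrottleType;

public class QuotaEnumsSketch {
  public static void main(String[] args) {
    // Print the constants of each quota-related enum in declaration order.
    for (QuotaType t : QuotaType.values()) System.out.println("QuotaType." + t);
    for (QuotaScope s : QuotaScope.values()) System.out.println("QuotaScope." + s);
    for (ThrottleType t : ThrottleType.values()) System.out.println("ThrottleType." + t);
    for (SpaceViolationPolicy p : SpaceViolationPolicy.values()) System.out.println("SpaceViolationPolicy." + p);
  }
}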
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
index 07ee7d6..9c2eef4 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":9,"i3":9,"i4":6,"i5":10,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":6,"i14":10,"i15":10,"i16":6,"i17":10,"i18":6,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":6,"i25":10,"i26":10};
+var methods = {"i0":10,"i1":10,"i2":9,"i3":9,"i4":6,"i5":10,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":6,"i14":10,"i15":10,"i16":10,"i17":6,"i18":10,"i19":6,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":6,"i26":10,"i27":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -213,15 +213,15 @@

 void
-add(Cell cell,
-    MemStoreSize memstoreSize)
+add(Cell cell,
+    MemStoreSizing memstoreSizing)
 Write an update

 void
-add(Iterable<Cell> cells,
-    MemStoreSize memstoreSize)
+add(Iterable<Cell> cells,
+    MemStoreSizing memstoreSizing)
 Write the updates
 
 
@@ -296,56 +296,60 @@

+(package private) MemStoreSizing
+getSnapshotSizing()

 protected abstract long
 heapSize()

 private void
-internalAdd(Cell toAdd,
-    boolean mslabUsed,
-    MemStoreSize memstoreSize)
+internalAdd(Cell toAdd,
+    boolean mslabUsed,
+    MemStoreSizing memstoreSizing)

 protected abstract long
 keySize()

 private Cell
 maybeCloneWithAllocator(Cell cell)

 protected void
 resetActive()

 private void
 setOldestEditTimeToNow()

 long
 timeOfOldestEdit()

 String
 toString()

 abstract void
 updateLowestUnflushedSequenceIdInWAL(boolean onlyIfMoreRecent)
 Updates the wal with the lowest sequence id (oldest entry) that is still in memory

 private void
-upsert(Cell cell,
-    long readpoint,
-    MemStoreSize memstoreSize)
+upsert(Cell cell,
+    long readpoint,
+    MemStoreSizing memstoreSizing)

 void
-upsert(Iterable<Cell> cells,
-    long readpoint,
-    MemStoreSize memstoreSize)
+upsert(Iterable<Cell> cells,
+    long readpoint,
+    MemStoreSizing memstoreSizing)
 Update or insert the specified cells.
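To make the signature change concrete, a small sketch of calling the new add() overloads shown above. AbstractMemStore, MemStoreSizing, Cell, and the add(...) methods come from this diff; the MemStoreAddSketch class and its write helper are hypothetical, and the accumulator and memstore instances are assumed to be supplied by surrounding region-server code, since both types are internal to HBase.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.AbstractMemStore;
import org.apache.hadoop.hbase.regionserver.MemStoreSizing;

final class MemStoreAddSketch {
  // The MemStoreSizing accumulator replaces the old MemStoreSize out-parameter:
  // each add() call folds the size delta of the written cells into `sizing`.
  static void write(AbstractMemStore memstore, Cell cell, Iterable<Cell> batch,
      MemStoreSizing sizing) {
    memstore.add(cell, sizing);   // Write an update
    memstore.add(batch, sizing);  // Write the updates
  }
}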
 
 
@@ -537,38 +541,38 @@

add
public void add(Iterable<Cell> cells,
-               MemStoreSize memstoreSize)
-Description copied from interface
