hbase git commit: HBASE-19255 PerformanceEvaluation class not found when run PE test

2017-11-14 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2 3ad300a2b -> 2dc191485


HBASE-19255 PerformanceEvaluation class not found when run PE test


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2dc19148
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2dc19148
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2dc19148

Branch: refs/heads/branch-2
Commit: 2dc191485f8a8ce18e89b0ee4bf2d0b0bf11ee1c
Parents: 3ad300a
Author: Guanghao Zhang 
Authored: Tue Nov 14 18:20:15 2017 +0800
Committer: Guanghao Zhang 
Committed: Wed Nov 15 15:06:45 2017 +0800

--
 hbase-assembly/pom.xml | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dc19148/hbase-assembly/pom.xml
--
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 105fe8e..77e45cb 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -183,6 +183,11 @@
   org.apache.hbase
   hbase-mapreduce
 
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-mapreduce</artifactId>
+      <type>test-jar</type>
+    </dependency>
 
 
   org.apache.hbase



hbase git commit: HBASE-19255 PerformanceEvaluation class not found when run PE test

2017-11-14 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master 797670b12 -> 249bc09d8


HBASE-19255 PerformanceEvaluation class not found when run PE test


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/249bc09d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/249bc09d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/249bc09d

Branch: refs/heads/master
Commit: 249bc09d8ddf3ddc3cd4b96b8395770898a1c721
Parents: 797670b
Author: Guanghao Zhang 
Authored: Tue Nov 14 18:20:15 2017 +0800
Committer: Guanghao Zhang 
Committed: Wed Nov 15 15:04:33 2017 +0800

--
 hbase-assembly/pom.xml | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/249bc09d/hbase-assembly/pom.xml
--
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 473f0d3..71e90f3 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -199,6 +199,11 @@
   org.apache.hbase
   hbase-mapreduce
 
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-mapreduce</artifactId>
+      <type>test-jar</type>
+    </dependency>
 
 
   org.apache.hbase



hbase git commit: HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

2017-11-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 b93a80596 -> 846753c18


HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

We make one Connection only instead of a Connection per table (Change is
just moving one line it involves right-shifting body of the function)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/846753c1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/846753c1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/846753c1

Branch: refs/heads/branch-1.4
Commit: 846753c185cb6771b7d64cbdae82cbc38e213c94
Parents: b93a805
Author: Michael Stack 
Authored: Mon Nov 13 11:42:10 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 14 21:51:25 2017 -0800

--
 .../mapreduce/MultiTableInputFormatBase.java| 108 ++-
 1 file changed, 55 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/846753c1/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 4931c3f..e91d20a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -181,60 +181,62 @@ public abstract class MultiTableInputFormatBase extends
 
 List splits = new ArrayList();
 Iterator iter = tableMaps.entrySet().iterator();
-while (iter.hasNext()) {
-  Map.Entry> entry = (Map.Entry>) iter.next();
-  TableName tableName = entry.getKey();
-  List scanList = entry.getValue();
-
-  try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration());
-Table table = conn.getTable(tableName);
-RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
-regionLocator, conn.getAdmin());
-Pair keys = regionLocator.getStartEndKeys();
-for (Scan scan : scanList) {
-  if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
-throw new IOException("Expecting at least one region for table : "
-+ tableName.getNameAsString());
-  }
-  int count = 0;
-
-  byte[] startRow = scan.getStartRow();
-  byte[] stopRow = scan.getStopRow();
-
-  for (int i = 0; i < keys.getFirst().length; i++) {
-if (!includeRegionInSplit(keys.getFirst()[i], 
keys.getSecond()[i])) {
-  continue;
+// Make a single Connection to the Cluster and use it across all tables.
+try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration())) {
+  while (iter.hasNext()) {
+Map.Entry> entry = (Map.Entry>) iter.next();
+TableName tableName = entry.getKey();
+List scanList = entry.getValue();
+try (Table table = conn.getTable(tableName);
+ RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
+  RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
+  regionLocator, conn.getAdmin());
+  Pair keys = regionLocator.getStartEndKeys();
+  for (Scan scan : scanList) {
+if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
+  throw new IOException("Expecting at least one region for table : 
"
+  + tableName.getNameAsString());
 }
-
-if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||
-Bytes.compareTo(startRow, keys.getSecond()[i]) < 0) &&
-(stopRow.length == 0 || Bytes.compareTo(stopRow,
-keys.getFirst()[i]) > 0)) {
-  byte[] splitStart = startRow.length == 0 ||
-  Bytes.compareTo(keys.getFirst()[i], startRow) >= 0 ?
-  keys.getFirst()[i] : startRow;
-  byte[] splitStop = (stopRow.length == 0 ||
-  Bytes.compareTo(keys.getSecond()[i], stopRow) <= 0) &&
-  keys.getSecond()[i].length > 0 ?
-  keys.getSecond()[i] : stopRow;
-
-  HRegionLocation hregionLocation = 
regionLocator.getRegionLocation(
-  keys.getFirst()[i], false);
-  String regionHostname = hregionLocation.getHostname();
-  HRegionInfo regionInfo = hregionLocation.getRegionInfo();
-  String encodedRegi

hbase git commit: HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

2017-11-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 1fad75296 -> 17f11ae6c


HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

We make one Connection only instead of a Connection per table (Change is
just moving one line it involves right-shifting body of the function)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/17f11ae6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/17f11ae6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/17f11ae6

Branch: refs/heads/branch-1.3
Commit: 17f11ae6c8e4c1be520167eb5b22d61b05a49c9f
Parents: 1fad752
Author: Michael Stack 
Authored: Mon Nov 13 11:42:10 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 14 21:51:47 2017 -0800

--
 .../mapreduce/MultiTableInputFormatBase.java| 108 ++-
 1 file changed, 55 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/17f11ae6/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 4931c3f..e91d20a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -181,60 +181,62 @@ public abstract class MultiTableInputFormatBase extends
 
 List splits = new ArrayList();
 Iterator iter = tableMaps.entrySet().iterator();
-while (iter.hasNext()) {
-  Map.Entry> entry = (Map.Entry>) iter.next();
-  TableName tableName = entry.getKey();
-  List scanList = entry.getValue();
-
-  try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration());
-Table table = conn.getTable(tableName);
-RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
-regionLocator, conn.getAdmin());
-Pair keys = regionLocator.getStartEndKeys();
-for (Scan scan : scanList) {
-  if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
-throw new IOException("Expecting at least one region for table : "
-+ tableName.getNameAsString());
-  }
-  int count = 0;
-
-  byte[] startRow = scan.getStartRow();
-  byte[] stopRow = scan.getStopRow();
-
-  for (int i = 0; i < keys.getFirst().length; i++) {
-if (!includeRegionInSplit(keys.getFirst()[i], 
keys.getSecond()[i])) {
-  continue;
+// Make a single Connection to the Cluster and use it across all tables.
+try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration())) {
+  while (iter.hasNext()) {
+Map.Entry> entry = (Map.Entry>) iter.next();
+TableName tableName = entry.getKey();
+List scanList = entry.getValue();
+try (Table table = conn.getTable(tableName);
+ RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
+  RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
+  regionLocator, conn.getAdmin());
+  Pair keys = regionLocator.getStartEndKeys();
+  for (Scan scan : scanList) {
+if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
+  throw new IOException("Expecting at least one region for table : 
"
+  + tableName.getNameAsString());
 }
-
-if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||
-Bytes.compareTo(startRow, keys.getSecond()[i]) < 0) &&
-(stopRow.length == 0 || Bytes.compareTo(stopRow,
-keys.getFirst()[i]) > 0)) {
-  byte[] splitStart = startRow.length == 0 ||
-  Bytes.compareTo(keys.getFirst()[i], startRow) >= 0 ?
-  keys.getFirst()[i] : startRow;
-  byte[] splitStop = (stopRow.length == 0 ||
-  Bytes.compareTo(keys.getSecond()[i], stopRow) <= 0) &&
-  keys.getSecond()[i].length > 0 ?
-  keys.getSecond()[i] : stopRow;
-
-  HRegionLocation hregionLocation = 
regionLocator.getRegionLocation(
-  keys.getFirst()[i], false);
-  String regionHostname = hregionLocation.getHostname();
-  HRegionInfo regionInfo = hregionLocation.getRegionInfo();
-  String encodedRegi

hbase git commit: HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

2017-11-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 f9833a780 -> 641e797e0


HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

We make one Connection only instead of a Connection per table (Change is
just moving one line it involves right-shifting body of the function)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/641e797e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/641e797e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/641e797e

Branch: refs/heads/branch-1
Commit: 641e797e02ef1285de8b8efc8d48989c686d42bd
Parents: f9833a7
Author: Michael Stack 
Authored: Mon Nov 13 11:42:10 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 14 21:48:45 2017 -0800

--
 .../mapreduce/MultiTableInputFormatBase.java| 108 ++-
 1 file changed, 55 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/641e797e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 4931c3f..e91d20a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -181,60 +181,62 @@ public abstract class MultiTableInputFormatBase extends
 
 List splits = new ArrayList();
 Iterator iter = tableMaps.entrySet().iterator();
-while (iter.hasNext()) {
-  Map.Entry> entry = (Map.Entry>) iter.next();
-  TableName tableName = entry.getKey();
-  List scanList = entry.getValue();
-
-  try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration());
-Table table = conn.getTable(tableName);
-RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
-regionLocator, conn.getAdmin());
-Pair keys = regionLocator.getStartEndKeys();
-for (Scan scan : scanList) {
-  if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
-throw new IOException("Expecting at least one region for table : "
-+ tableName.getNameAsString());
-  }
-  int count = 0;
-
-  byte[] startRow = scan.getStartRow();
-  byte[] stopRow = scan.getStopRow();
-
-  for (int i = 0; i < keys.getFirst().length; i++) {
-if (!includeRegionInSplit(keys.getFirst()[i], 
keys.getSecond()[i])) {
-  continue;
+// Make a single Connection to the Cluster and use it across all tables.
+try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration())) {
+  while (iter.hasNext()) {
+Map.Entry> entry = (Map.Entry>) iter.next();
+TableName tableName = entry.getKey();
+List scanList = entry.getValue();
+try (Table table = conn.getTable(tableName);
+ RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
+  RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
+  regionLocator, conn.getAdmin());
+  Pair keys = regionLocator.getStartEndKeys();
+  for (Scan scan : scanList) {
+if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
+  throw new IOException("Expecting at least one region for table : 
"
+  + tableName.getNameAsString());
 }
-
-if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||
-Bytes.compareTo(startRow, keys.getSecond()[i]) < 0) &&
-(stopRow.length == 0 || Bytes.compareTo(stopRow,
-keys.getFirst()[i]) > 0)) {
-  byte[] splitStart = startRow.length == 0 ||
-  Bytes.compareTo(keys.getFirst()[i], startRow) >= 0 ?
-  keys.getFirst()[i] : startRow;
-  byte[] splitStop = (stopRow.length == 0 ||
-  Bytes.compareTo(keys.getSecond()[i], stopRow) <= 0) &&
-  keys.getSecond()[i].length > 0 ?
-  keys.getSecond()[i] : stopRow;
-
-  HRegionLocation hregionLocation = 
regionLocator.getRegionLocation(
-  keys.getFirst()[i], false);
-  String regionHostname = hregionLocation.getHostname();
-  HRegionInfo regionInfo = hregionLocation.getRegionInfo();
-  String encodedRegionNa

hbase git commit: HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

2017-11-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 7139113fd -> 3ad300a2b


HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

We make one Connection only instead of a Connection per table (Change is
just moving one line it involves right-shifting body of the function)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3ad300a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3ad300a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3ad300a2

Branch: refs/heads/branch-2
Commit: 3ad300a2b02d761f2d0b7f6ba644fe8a73155fba
Parents: 7139113
Author: Michael Stack 
Authored: Mon Nov 13 11:42:10 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 14 21:48:06 2017 -0800

--
 .../mapreduce/MultiTableInputFormatBase.java| 108 ++-
 1 file changed, 55 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3ad300a2/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 65c2e13..82a86b4 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -178,60 +178,62 @@ public abstract class MultiTableInputFormatBase extends
 
 List splits = new ArrayList<>();
 Iterator iter = tableMaps.entrySet().iterator();
-while (iter.hasNext()) {
-  Map.Entry> entry = (Map.Entry>) iter.next();
-  TableName tableName = entry.getKey();
-  List scanList = entry.getValue();
-
-  try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration());
-Table table = conn.getTable(tableName);
-RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
-regionLocator, conn.getAdmin());
-Pair keys = regionLocator.getStartEndKeys();
-for (Scan scan : scanList) {
-  if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
-throw new IOException("Expecting at least one region for table : "
-+ tableName.getNameAsString());
-  }
-  int count = 0;
-
-  byte[] startRow = scan.getStartRow();
-  byte[] stopRow = scan.getStopRow();
-
-  for (int i = 0; i < keys.getFirst().length; i++) {
-if (!includeRegionInSplit(keys.getFirst()[i], 
keys.getSecond()[i])) {
-  continue;
+// Make a single Connection to the Cluster and use it across all tables.
+try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration())) {
+  while (iter.hasNext()) {
+Map.Entry> entry = (Map.Entry>) iter.next();
+TableName tableName = entry.getKey();
+List scanList = entry.getValue();
+try (Table table = conn.getTable(tableName);
+ RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
+  RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
+  regionLocator, conn.getAdmin());
+  Pair keys = regionLocator.getStartEndKeys();
+  for (Scan scan : scanList) {
+if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
+  throw new IOException("Expecting at least one region for table : 
"
+  + tableName.getNameAsString());
 }
-
-if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||
-Bytes.compareTo(startRow, keys.getSecond()[i]) < 0) &&
-(stopRow.length == 0 || Bytes.compareTo(stopRow,
-keys.getFirst()[i]) > 0)) {
-  byte[] splitStart = startRow.length == 0 ||
-  Bytes.compareTo(keys.getFirst()[i], startRow) >= 0 ?
-  keys.getFirst()[i] : startRow;
-  byte[] splitStop = (stopRow.length == 0 ||
-  Bytes.compareTo(keys.getSecond()[i], stopRow) <= 0) &&
-  keys.getSecond()[i].length > 0 ?
-  keys.getSecond()[i] : stopRow;
-
-  HRegionLocation hregionLocation = 
regionLocator.getRegionLocation(
-  keys.getFirst()[i], false);
-  String regionHostname = hregionLocation.getHostname();
-  HRegionInfo regionInfo = hregionLocation.getRegionInfo();
-  Strin

hbase git commit: HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

2017-11-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 0ac18a812 -> 797670b12


HBASE-19245 MultiTableInputFormatBase#getSplits creates a Connection per Table

We make one Connection only instead of a Connection per table (Change is
just moving one line it involves right-shifting body of the function)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/797670b1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/797670b1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/797670b1

Branch: refs/heads/master
Commit: 797670b129526697eefb2578fedb2b272c65f52e
Parents: 0ac18a8
Author: Michael Stack 
Authored: Mon Nov 13 11:42:10 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 14 21:47:35 2017 -0800

--
 .../mapreduce/MultiTableInputFormatBase.java| 108 ++-
 1 file changed, 55 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/797670b1/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 65c2e13..82a86b4 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -178,60 +178,62 @@ public abstract class MultiTableInputFormatBase extends
 
 List splits = new ArrayList<>();
 Iterator iter = tableMaps.entrySet().iterator();
-while (iter.hasNext()) {
-  Map.Entry> entry = (Map.Entry>) iter.next();
-  TableName tableName = entry.getKey();
-  List scanList = entry.getValue();
-
-  try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration());
-Table table = conn.getTable(tableName);
-RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
-regionLocator, conn.getAdmin());
-Pair keys = regionLocator.getStartEndKeys();
-for (Scan scan : scanList) {
-  if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
-throw new IOException("Expecting at least one region for table : "
-+ tableName.getNameAsString());
-  }
-  int count = 0;
-
-  byte[] startRow = scan.getStartRow();
-  byte[] stopRow = scan.getStopRow();
-
-  for (int i = 0; i < keys.getFirst().length; i++) {
-if (!includeRegionInSplit(keys.getFirst()[i], 
keys.getSecond()[i])) {
-  continue;
+// Make a single Connection to the Cluster and use it across all tables.
+try (Connection conn = 
ConnectionFactory.createConnection(context.getConfiguration())) {
+  while (iter.hasNext()) {
+Map.Entry> entry = (Map.Entry>) iter.next();
+TableName tableName = entry.getKey();
+List scanList = entry.getValue();
+try (Table table = conn.getTable(tableName);
+ RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
+  RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(
+  regionLocator, conn.getAdmin());
+  Pair keys = regionLocator.getStartEndKeys();
+  for (Scan scan : scanList) {
+if (keys == null || keys.getFirst() == null || 
keys.getFirst().length == 0) {
+  throw new IOException("Expecting at least one region for table : 
"
+  + tableName.getNameAsString());
 }
-
-if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||
-Bytes.compareTo(startRow, keys.getSecond()[i]) < 0) &&
-(stopRow.length == 0 || Bytes.compareTo(stopRow,
-keys.getFirst()[i]) > 0)) {
-  byte[] splitStart = startRow.length == 0 ||
-  Bytes.compareTo(keys.getFirst()[i], startRow) >= 0 ?
-  keys.getFirst()[i] : startRow;
-  byte[] splitStop = (stopRow.length == 0 ||
-  Bytes.compareTo(keys.getSecond()[i], stopRow) <= 0) &&
-  keys.getSecond()[i].length > 0 ?
-  keys.getSecond()[i] : stopRow;
-
-  HRegionLocation hregionLocation = 
regionLocator.getRegionLocation(
-  keys.getFirst()[i], false);
-  String regionHostname = hregionLocation.getHostname();
-  HRegionInfo regionInfo = hregionLocation.getRegionInfo();
-  String en

hbase git commit: HBASE-18963 Removed MultiRowMutationProcessor and modified mutateRows...() to use batchMutate() with atomic set to true

2017-11-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 3d4fd9094 -> 7139113fd


HBASE-18963 Removed MultiRowMutationProcessor and modified mutateRows...() to 
use batchMutate() with atomic set to true


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7139113f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7139113f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7139113f

Branch: refs/heads/branch-2
Commit: 7139113fde5066eeeb95ff19bd2157668c0b6e78
Parents: 3d4fd90
Author: Umesh Agashe 
Authored: Tue Nov 7 13:57:59 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 14 21:45:01 2017 -0800

--
 .../coprocessor/MultiRowMutationEndpoint.java   |   7 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  45 ++--
 .../regionserver/MultiRowMutationProcessor.java | 209 ---
 .../hbase/regionserver/RSRpcServices.java   |  66 ++
 4 files changed, 51 insertions(+), 276 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7139113f/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
index 5d44cd6..d230773 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.SortedSet;
@@ -47,14 +48,14 @@ import com.google.protobuf.Service;
 
 /**
  * This class demonstrates how to implement atomic multi row transactions using
- * {@link HRegion#mutateRowsWithLocks(java.util.Collection, 
java.util.Collection)}
+ * {@link HRegion#mutateRowsWithLocks(Collection, Collection, long, long)}
  * and Coprocessor endpoints.
  *
  * Defines a protocol to perform multi row transactions.
  * See {@link MultiRowMutationEndpoint} for the implementation.
  * 
  * See
- * {@link HRegion#mutateRowsWithLocks(java.util.Collection, 
java.util.Collection)}
+ * {@link HRegion#mutateRowsWithLocks(Collection, Collection, long, long)}
  * for details and limitations.
  * 
  * Example:

http://git-wip-us.apache.org/repos/asf/hbase/blob/7139113f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 197aa3c..3a3cb03 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3279,6 +3279,8 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 private void addFamilyMapToWALEdit(Map> familyMap,
 WALEdit walEdit) {
   for (List edits : familyMap.values()) {
+// Optimization: 'foreach' loop is not used. See:
+// HBASE-12023 HRegion.applyFamilyMapToMemstore creates too many 
iterator objects
 assert edits instanceof RandomAccess;
 int listSize = edits.size();
 for (int i=0; i < listSize; i++) {
@@ -4109,6 +4111,8 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   throws IOException {
 for (List cells: cellItr) {
   if (cells == null) continue;
+  // Optimization: 'foreach' loop is not used. See:
+  // HBASE-12023 HRegion.applyFamilyMapToMemstore creates too many 
iterator objects
   assert cells instanceof RandomAccess;
   int listSize = cells.size();
   for (int i = 0; i < listSize; i++) {
@@ -4259,6 +4263,8 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 }
 long maxTs = now + timestampSlop;
 for (List kvs : familyMap.values()) {
+  // Optimization: 'foreach' loop is not used. See:
+  // HBASE-12023 HRegion.applyFamilyMapToMemstore creates too many 
iterator objects
   assert kvs instanceof RandomAccess;
   int listSize  = kvs.size();
   for (int i=0; i < listSize; i++) {
@@ -7135,20 +7141,13 @@ public class HR

hbase git commit: HBASE-18963 Removed MultiRowMutationProcessor and modified mutateRows...() to use batchMutate() with atomic set to true

2017-11-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 0c4fbcc32 -> 0ac18a812


HBASE-18963 Removed MultiRowMutationProcessor and modified mutateRows...() to 
use batchMutate() with atomic set to true


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0ac18a81
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0ac18a81
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0ac18a81

Branch: refs/heads/master
Commit: 0ac18a81262be2b1040455fb878178045bc7764a
Parents: 0c4fbcc
Author: Umesh Agashe 
Authored: Tue Nov 7 13:57:59 2017 -0800
Committer: Michael Stack 
Committed: Tue Nov 14 21:44:30 2017 -0800

--
 .../coprocessor/MultiRowMutationEndpoint.java   |   7 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  45 ++--
 .../regionserver/MultiRowMutationProcessor.java | 209 ---
 .../hbase/regionserver/RSRpcServices.java   |  66 ++
 4 files changed, 51 insertions(+), 276 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac18a81/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
index 5d44cd6..d230773 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.SortedSet;
@@ -47,14 +48,14 @@ import com.google.protobuf.Service;
 
 /**
  * This class demonstrates how to implement atomic multi row transactions using
- * {@link HRegion#mutateRowsWithLocks(java.util.Collection, 
java.util.Collection)}
+ * {@link HRegion#mutateRowsWithLocks(Collection, Collection, long, long)}
  * and Coprocessor endpoints.
  *
  * Defines a protocol to perform multi row transactions.
  * See {@link MultiRowMutationEndpoint} for the implementation.
  * 
  * See
- * {@link HRegion#mutateRowsWithLocks(java.util.Collection, 
java.util.Collection)}
+ * {@link HRegion#mutateRowsWithLocks(Collection, Collection, long, long)}
  * for details and limitations.
  * 
  * Example:

http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac18a81/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 197aa3c..3a3cb03 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3279,6 +3279,8 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 private void addFamilyMapToWALEdit(Map> familyMap,
 WALEdit walEdit) {
   for (List edits : familyMap.values()) {
+// Optimization: 'foreach' loop is not used. See:
+// HBASE-12023 HRegion.applyFamilyMapToMemstore creates too many 
iterator objects
 assert edits instanceof RandomAccess;
 int listSize = edits.size();
 for (int i=0; i < listSize; i++) {
@@ -4109,6 +4111,8 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   throws IOException {
 for (List cells: cellItr) {
   if (cells == null) continue;
+  // Optimization: 'foreach' loop is not used. See:
+  // HBASE-12023 HRegion.applyFamilyMapToMemstore creates too many 
iterator objects
   assert cells instanceof RandomAccess;
   int listSize = cells.size();
   for (int i = 0; i < listSize; i++) {
@@ -4259,6 +4263,8 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 }
 long maxTs = now + timestampSlop;
 for (List kvs : familyMap.values()) {
+  // Optimization: 'foreach' loop is not used. See:
+  // HBASE-12023 HRegion.applyFamilyMapToMemstore creates too many 
iterator objects
   assert kvs instanceof RandomAccess;
   int listSize  = kvs.size();
   for (int i=0; i < listSize; i++) {
@@ -7135,20 +7141,13 @@ public class HRegio

hbase git commit: HBASE-12091 Optionally ignore edits for dropped tables for replication.

2017-11-14 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 dace9d776 -> b93a80596


HBASE-12091 Optionally ignore edits for dropped tables for replication.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b93a8059
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b93a8059
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b93a8059

Branch: refs/heads/branch-1.4
Commit: b93a80596dfc01b67a3b1561ef49f4b4c7eb900b
Parents: dace9d7
Author: Lars Hofhansl 
Authored: Tue Nov 14 16:33:12 2017 -0800
Committer: Andrew Purtell 
Committed: Tue Nov 14 16:54:11 2017 -0800

--
 .../RetriesExhaustedWithDetailsException.java   |   9 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  18 ++
 .../org/apache/hadoop/hbase/HConstants.java |   5 +
 .../hbase/protobuf/ReplicationProtbufUtil.java  |   2 +-
 .../hbase/replication/ReplicationEndpoint.java  |   6 +
 .../HBaseInterClusterReplicationEndpoint.java   |  66 -
 .../regionserver/ReplicationSink.java   |   8 +
 .../regionserver/ReplicationSourceManager.java  |   2 +-
 .../TestReplicationDroppedTables.java   | 292 +++
 9 files changed, 403 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b93a8059/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index 21ab156..e78f810 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -24,6 +24,7 @@ import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.util.Collection;
@@ -51,6 +52,14 @@ extends RetriesExhaustedException {
   List actions;
   List hostnameAndPort;
 
+  public RetriesExhaustedWithDetailsException(final String msg) {
+super(msg);
+  }
+
+  public RetriesExhaustedWithDetailsException(final String msg, final 
IOException e) {
+super(msg, e);
+  }
+
   public RetriesExhaustedWithDetailsException(List exceptions,
   List actions,
   List hostnameAndPort) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b93a8059/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 5945e5e..84d4a67 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -331,6 +331,24 @@ public final class ProtobufUtil {
   }
 
   /**
+   * Return the Exception thrown by the remote server wrapped in
+   * ServiceException as cause. RemoteException are left untouched.
+   *
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException.
+   */
+  public static IOException getServiceException(ServiceException e) {
+Throwable t = e;
+if (e instanceof ServiceException) {
+  t = e.getCause();
+}
+if (ExceptionUtil.isInterrupt(t)) {
+  return ExceptionUtil.asInterrupt(t);
+}
+return t instanceof IOException ? (IOException) t : new 
HBaseIOException(t);
+  }
+
+  /**
* Like {@link #getRemoteException(ServiceException)} but more generic, able 
to handle more than
* just {@link ServiceException}. Prefer this method to
* {@link #getRemoteException(ServiceException)} because trying to

http://git-wip-us.apache.org/repos/asf/hbase/blob/b93a8059/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 79fb00d..8242a17 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1214,6 +1214,11 @@ public final class HConstants {
   public static final S

hbase git commit: HBASE-12091 Optionally ignore edits for dropped tables for replication.

2017-11-14 Thread larsh
Repository: hbase
Updated Branches:
  refs/heads/branch-2 76b9d4617 -> 3d4fd9094


HBASE-12091 Optionally ignore edits for dropped tables for replication.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3d4fd909
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3d4fd909
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3d4fd909

Branch: refs/heads/branch-2
Commit: 3d4fd90947105d47d5c35d5a37f36cc56b9705de
Parents: 76b9d46
Author: Lars Hofhansl 
Authored: Tue Nov 14 17:16:56 2017 -0800
Committer: Lars Hofhansl 
Committed: Tue Nov 14 17:16:56 2017 -0800

--
 .../RetriesExhaustedWithDetailsException.java   |   9 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  15 +
 .../org/apache/hadoop/hbase/HConstants.java |   5 +
 .../hbase/protobuf/ReplicationProtbufUtil.java  |   2 +-
 .../hbase/replication/ReplicationEndpoint.java  |   6 +
 .../HBaseInterClusterReplicationEndpoint.java   |  63 +++-
 .../regionserver/ReplicationSink.java   |   8 +
 .../regionserver/ReplicationSourceManager.java  |   2 +-
 .../TestReplicationDroppedTables.java   | 292 +++
 9 files changed, 397 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3d4fd909/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index ff414be..cb00675 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -19,6 +19,7 @@
 
 package org.apache.hadoop.hbase.client;
 
+import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.util.Collection;
@@ -49,6 +50,14 @@ extends RetriesExhaustedException {
   List actions;
   List hostnameAndPort;
 
+  public RetriesExhaustedWithDetailsException(final String msg) {
+super(msg);
+  }
+
+  public RetriesExhaustedWithDetailsException(final String msg, final 
IOException e) {
+super(msg, e);
+  }
+
   public RetriesExhaustedWithDetailsException(List exceptions,
   List actions,
   List hostnameAndPort) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3d4fd909/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index d154177..5f12c8f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -255,6 +255,21 @@ public final class ProtobufUtil {
   }
 
   /**
+   * Return the Exception thrown by the remote server wrapped in
+   * ServiceException as cause. RemoteException are left untouched.
+   *
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException.
+   */
+  public static IOException 
getServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException
 e) {
+Throwable t = e.getCause();
+if (ExceptionUtil.isInterrupt(t)) {
+  return ExceptionUtil.asInterrupt(t);
+}
+return t instanceof IOException ? (IOException) t : new 
HBaseIOException(t);
+  }
+
+  /**
* Like {@link #getRemoteException(ServiceException)} but more generic, able 
to handle more than
* just {@link ServiceException}. Prefer this method to
* {@link #getRemoteException(ServiceException)} because trying to

http://git-wip-us.apache.org/repos/asf/hbase/blob/3d4fd909/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 7fb1035..589fae3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1230,6 +1230,11 @@ public final class HConstants {
   public static final String REPLICATION_SOURCE_MAXTHREADS_KEY =
   "hbase.replication.source.maxthreads";
 
+  /** Drop edits for tables that been deleted fro

hbase git commit: HBASE-12091 Optionally ignore edits for dropped tables for replication.

2017-11-14 Thread larsh
Repository: hbase
Updated Branches:
  refs/heads/master f9b19c6e3 -> 0c4fbcc32


HBASE-12091 Optionally ignore edits for dropped tables for replication.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0c4fbcc3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0c4fbcc3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0c4fbcc3

Branch: refs/heads/master
Commit: 0c4fbcc32973e9fb3840cb2b5e397155044c2b0c
Parents: f9b19c6
Author: Lars Hofhansl 
Authored: Tue Nov 14 17:08:14 2017 -0800
Committer: Lars Hofhansl 
Committed: Tue Nov 14 17:08:14 2017 -0800

--
 .../RetriesExhaustedWithDetailsException.java   |   9 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  15 +
 .../org/apache/hadoop/hbase/HConstants.java |   5 +
 .../hbase/protobuf/ReplicationProtbufUtil.java  |   2 +-
 .../hbase/replication/ReplicationEndpoint.java  |   6 +
 .../HBaseInterClusterReplicationEndpoint.java   |  63 +++-
 .../regionserver/ReplicationSink.java   |   8 +
 .../regionserver/ReplicationSourceManager.java  |   2 +-
 .../TestReplicationDroppedTables.java   | 292 +++
 9 files changed, 397 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0c4fbcc3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index ff414be..cb00675 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -19,6 +19,7 @@
 
 package org.apache.hadoop.hbase.client;
 
+import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.util.Collection;
@@ -49,6 +50,14 @@ extends RetriesExhaustedException {
   List actions;
   List hostnameAndPort;
 
+  public RetriesExhaustedWithDetailsException(final String msg) {
+super(msg);
+  }
+
+  public RetriesExhaustedWithDetailsException(final String msg, final 
IOException e) {
+super(msg, e);
+  }
+
   public RetriesExhaustedWithDetailsException(List exceptions,
   List actions,
   List hostnameAndPort) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0c4fbcc3/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index f8195f1..d86fc62 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -254,6 +254,21 @@ public final class ProtobufUtil {
   }
 
   /**
+   * Return the Exception thrown by the remote server wrapped in
+   * ServiceException as cause. RemoteException are left untouched.
+   *
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException.
+   */
+  public static IOException 
getServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException
 e) {
+Throwable t = e.getCause();
+if (ExceptionUtil.isInterrupt(t)) {
+  return ExceptionUtil.asInterrupt(t);
+}
+return t instanceof IOException ? (IOException) t : new 
HBaseIOException(t);
+  }
+
+  /**
* Like {@link #getRemoteException(ServiceException)} but more generic, able 
to handle more than
* just {@link ServiceException}. Prefer this method to
* {@link #getRemoteException(ServiceException)} because trying to

http://git-wip-us.apache.org/repos/asf/hbase/blob/0c4fbcc3/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index cc9fc57..834e5bb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1200,6 +1200,11 @@ public final class HConstants {
   public static final String REPLICATION_SOURCE_MAXTHREADS_KEY =
   "hbase.replication.source.maxthreads";
 
+  /** Drop edits for tables that been deleted from th

hbase git commit: HBASE-12091 Optionally ignore edits for dropped tables for replication.

2017-11-14 Thread larsh
Repository: hbase
Updated Branches:
  refs/heads/branch-1 c0639d271 -> f9833a780


HBASE-12091 Optionally ignore edits for dropped tables for replication.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f9833a78
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f9833a78
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f9833a78

Branch: refs/heads/branch-1
Commit: f9833a7802bd0d4291e8b6ec6baf2f85fe2e401f
Parents: c0639d2
Author: Lars Hofhansl 
Authored: Tue Nov 14 16:33:12 2017 -0800
Committer: Lars Hofhansl 
Committed: Tue Nov 14 16:33:12 2017 -0800

--
 .../RetriesExhaustedWithDetailsException.java   |   9 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  18 ++
 .../org/apache/hadoop/hbase/HConstants.java |   5 +
 .../hbase/protobuf/ReplicationProtbufUtil.java  |   2 +-
 .../hbase/replication/ReplicationEndpoint.java  |   6 +
 .../HBaseInterClusterReplicationEndpoint.java   |  66 -
 .../regionserver/ReplicationSink.java   |   8 +
 .../regionserver/ReplicationSourceManager.java  |   2 +-
 .../TestReplicationDroppedTables.java   | 292 +++
 9 files changed, 403 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f9833a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index 21ab156..e78f810 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -24,6 +24,7 @@ import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.util.Collection;
@@ -51,6 +52,14 @@ extends RetriesExhaustedException {
   List actions;
   List hostnameAndPort;
 
+  public RetriesExhaustedWithDetailsException(final String msg) {
+super(msg);
+  }
+
+  public RetriesExhaustedWithDetailsException(final String msg, final 
IOException e) {
+super(msg, e);
+  }
+
   public RetriesExhaustedWithDetailsException(List exceptions,
   List actions,
   List hostnameAndPort) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f9833a78/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 5945e5e..84d4a67 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -331,6 +331,24 @@ public final class ProtobufUtil {
   }
 
   /**
+   * Return the Exception thrown by the remote server wrapped in
+   * ServiceException as cause. RemoteException are left untouched.
+   *
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException.
+   */
+  public static IOException getServiceException(ServiceException e) {
+Throwable t = e;
+if (e instanceof ServiceException) {
+  t = e.getCause();
+}
+if (ExceptionUtil.isInterrupt(t)) {
+  return ExceptionUtil.asInterrupt(t);
+}
+return t instanceof IOException ? (IOException) t : new 
HBaseIOException(t);
+  }
+
+  /**
* Like {@link #getRemoteException(ServiceException)} but more generic, able 
to handle more than
* just {@link ServiceException}. Prefer this method to
* {@link #getRemoteException(ServiceException)} because trying to

http://git-wip-us.apache.org/repos/asf/hbase/blob/f9833a78/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 8df7bd8..c9f9ded 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1247,6 +1247,11 @@ public final class HConstants {
   public static final String

hbase git commit: HBASE-19210 TestNamespacesInstanceResource fails

2017-11-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 ffb8c641d -> 76b9d4617


HBASE-19210 TestNamespacesInstanceResource fails


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/76b9d461
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/76b9d461
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/76b9d461

Branch: refs/heads/branch-2
Commit: 76b9d4617c657b8e5a5244f06ff6c4165c69896e
Parents: ffb8c64
Author: tedyu 
Authored: Tue Nov 14 11:05:36 2017 -0800
Committer: tedyu 
Committed: Tue Nov 14 11:05:36 2017 -0800

--
 .../apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/76b9d461/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
--
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
index 9c724cd..6d3e89d 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
@@ -53,6 +53,7 @@ import static org.junit.Assert.*;
 
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -241,6 +242,7 @@ public class TestNamespacesInstanceResource {
 assertEquals(503, response.getCode());
   }
 
+  @Ignore("HBASE-19210")
   @Test
   public void testInvalidNamespacePostsAndPuts() throws IOException, 
JAXBException {
 String namespacePath1 = "/namespaces/" + NAMESPACE1;



hbase git commit: HBASE-19210 TestNamespacesInstanceResource fails

2017-11-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master e0580b20d -> f9b19c6e3


HBASE-19210 TestNamespacesInstanceResource fails


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f9b19c6e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f9b19c6e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f9b19c6e

Branch: refs/heads/master
Commit: f9b19c6e339829db1df9d9953bdb4c134280f117
Parents: e0580b2
Author: tedyu 
Authored: Tue Nov 14 11:04:31 2017 -0800
Committer: tedyu 
Committed: Tue Nov 14 11:04:31 2017 -0800

--
 .../apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f9b19c6e/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
--
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
index 9c724cd..6d3e89d 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
@@ -53,6 +53,7 @@ import static org.junit.Assert.*;
 
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -241,6 +242,7 @@ public class TestNamespacesInstanceResource {
 assertEquals(503, response.getCode());
   }
 
+  @Ignore("HBASE-19210")
   @Test
   public void testInvalidNamespacePostsAndPuts() throws IOException, 
JAXBException {
 String namespacePath1 = "/namespaces/" + NAMESPACE1;



hbase-thirdparty git commit: HBASE-19247 update netty version

2017-11-14 Thread mdrob
Repository: hbase-thirdparty
Updated Branches:
  refs/heads/master 61fbbd8f1 -> 3d4bc1986


HBASE-19247 update netty version


Project: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/commit/3d4bc198
Tree: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/tree/3d4bc198
Diff: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/diff/3d4bc198

Branch: refs/heads/master
Commit: 3d4bc198658be25d2215377cb7631aa2a66737f9
Parents: 61fbbd8
Author: Mike Drob 
Authored: Mon Nov 13 16:50:37 2017 -0600
Committer: Mike Drob 
Committed: Mon Nov 13 16:50:37 2017 -0600

--
 hbase-shaded-netty/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/3d4bc198/hbase-shaded-netty/pom.xml
--
diff --git a/hbase-shaded-netty/pom.xml b/hbase-shaded-netty/pom.xml
index 9726c68..8f915f2 100644
--- a/hbase-shaded-netty/pom.xml
+++ b/hbase-shaded-netty/pom.xml
@@ -144,7 +144,7 @@
 
   io.netty
   netty-all
-  4.1.12.Final
+  4.1.17.Final
 
   
 



[14/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index e75cd67..ec0de14 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -298,7 +298,7 @@ service.
 
 
 private static HRegionLocation
-AsyncMetaTableAccessor.getRegionLocation(Result r,
+MetaTableAccessor.getRegionLocation(Result r,
  RegionInfo regionInfo,
  int replicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -307,7 +307,7 @@ service.
 
 
 private static HRegionLocation
-MetaTableAccessor.getRegionLocation(Result r,
+AsyncMetaTableAccessor.getRegionLocation(Result r,
  RegionInfo regionInfo,
  int replicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -315,55 +315,55 @@ service.
 
 
 
-private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
-AsyncMetaTableAccessor.getRegionLocations(Result r)
+static RegionLocations
+MetaTableAccessor.getRegionLocations(Result r)
 Returns an HRegionLocationList extracted from the 
result.
 
 
 
-static RegionLocations
-MetaTableAccessor.getRegionLocations(Result r)
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
+AsyncMetaTableAccessor.getRegionLocations(Result r)
 Returns an HRegionLocationList extracted from the 
result.
 
 
 
 private static long
-AsyncMetaTableAccessor.getSeqNumDuringOpen(Result r,
+MetaTableAccessor.getSeqNumDuringOpen(Result r,
int replicaId)
 The latest seqnum that the server writing to meta observed 
when opening the region.
 
 
 
 private static long
-MetaTableAccessor.getSeqNumDuringOpen(Result r,
+AsyncMetaTableAccessor.getSeqNumDuringOpen(Result r,
int replicaId)
 The latest seqnum that the server writing to meta observed 
when opening the region.
 
 
 
-private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
-AsyncMetaTableAccessor.getServerName(Result r,
+static ServerName
+MetaTableAccessor.getServerName(Result r,
  int replicaId)
 Returns a ServerName from catalog table Result.
 
 
 
-static ServerName
-MetaTableAccessor.getServerName(Result r,
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
+AsyncMetaTableAccessor.getServerName(Result r,
  int replicaId)
 Returns a ServerName from catalog table Result.
 
 
 
-private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
-AsyncMetaTableAccessor.getTableState(Result r) 
-
-
 static TableState
 MetaTableAccessor.getTableState(Result r)
 Decode table state from META Result.
 
 
+
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">Optional
+AsyncMetaTableAccessor.getTableState(Result r) 
+
 
 void
 AsyncMetaTableAccessor.MetaTableRawScanResultConsumer.onNext(Result[] results,
@@ -459,13 +459,13 @@ service.
 ClientScanner.cache 
 
 
-private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-CompleteScanResultCache.partialResults 
-
-
 private http://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true";
 title="class or interface in java.util">Deque
 BatchScanResultCache.partialResults 
 
+
+private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+CompleteScanResultCache.partialResults 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/Queue.html?is-external=true";
 title="class or interface in java.util">Queue
 AsyncTableResultScanner.queue 
@@ -488,7 +488,7 @@ service.
 
 
 Result[]
-AllowPartialScanResultCache.addAndGet(Result[] results,
+BatchScanResultCache.addAndGet(Result[] results,
  boolean isHeartbeatMessage) 
 
 
@@ -498,26 +498,22 @@ service.
 
 
 Result[]
-BatchScanResultCache.addAndGet(Result[] results,
+AllowPartialScanResultCache.addAndGet(Result[] results,
  boolean isHeartbeatMessage) 
 
 
 Result
-Table.append(Append append)
+HTable.append(Append append)
 Appends values to one or more co

[25/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
index 05380d3..4270e32 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
@@ -813,7 +813,7 @@ implements 
 
 getBufferedMutatorBuilder
-public AsyncBufferedMutatorBuilder getBufferedMutatorBuilder(TableName tableName)
+public AsyncBufferedMutatorBuilder getBufferedMutatorBuilder(TableName tableName)
 Description copied from 
interface: AsyncConnection
 Returns an AsyncBufferedMutatorBuilder 
for creating AsyncBufferedMutator.
 
@@ -830,7 +830,7 @@ implements 
 
 getBufferedMutatorBuilder
-public AsyncBufferedMutatorBuilder getBufferedMutatorBuilder(TableName tableName,
+public AsyncBufferedMutatorBuilder getBufferedMutatorBuilder(TableName tableName,
  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
 Description copied from 
interface: AsyncConnection
 Returns an AsyncBufferedMutatorBuilder 
for creating AsyncBufferedMutator.



[08/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 2cfe6f1..51f1a63 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -151,111 +151,111 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cell c) 
+ColumnPrefixFilter.filterCell(Cell cell) 
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cell c) 
+ColumnCountGetFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FilterListBase.filterCell(Cell c) 
+RowFilter.filterCell(Cell v) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cell c) 
+FuzzyRowFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cell cell) 
+Filter.filterCell(Cell c)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cell ignored) 
+RandomRowFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-RowFilter.filterCell(Cell v) 
+FirstKeyOnlyFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cell c) 
+SkipFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cell c) 
+TimestampsFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cell c) 
+ValueFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cell c) 
+KeyOnlyFilter.filterCell(Cell ignored) 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cell c) 
+FamilyFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cell c) 
+QualifierFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cell ignored) 
+FilterList.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cell ignored) 
+ColumnRangeFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-Filter.filterCell(Cell c)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+ColumnPaginationFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cell c) 
+WhileMatchFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cell c) 
+MultiRowRangeFilter.filterCell(Cell ignored) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
-Deprecated. 
- 
+PrefixFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cell c) 
+DependentColumnFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cell c) 
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cell c) 
+PageFilter.filterCell(Cell ignored) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cell c) 
+FilterListBase.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cell c) 
+InclusiveStopFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cell c) 
+MultipleColumnPrefixFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cell c) 
+SingleColumnValueFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
@@ -271,158 +271,158 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell c)
+ColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell c)
+ColumnCountGetFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-FilterListBase.filterKeyValue(Cell c) 
+RowFilter.filterKeyValue(Cell c)
+Deprecated. 
+
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell c)
+FuzzyRowFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell c)
-Deprecated. 
+Filter.filterKeyValue(Cell c)
+Deprecated. 
+As of release 2.0.0, this 
will be removed in HBase 3.0.0.
+ Instead use filterCell(Cell)
+
 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell c)
+RandomRowFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell c)
+FirstKeyOnlyFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell c)
+SkipFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell c)
+TimestampsFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell c)
+ValueFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell c)
+KeyOnlyFilter.filterKeyValue(Cell ignored)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c)
+FamilyFilter.filterKeyVal

[43/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
index 680dcc7..8cdf469 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
@@ -129,13 +129,13 @@
 
 
 Bytes
-HTableDescriptor.getValue(Bytes key)
+HColumnDescriptor.getValue(Bytes key)
 Deprecated. 
  
 
 
 Bytes
-HColumnDescriptor.getValue(Bytes key)
+HTableDescriptor.getValue(Bytes key)
 Deprecated. 
  
 
@@ -150,25 +150,25 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-HTableDescriptor.getValues()
+HColumnDescriptor.getValues()
 Deprecated. 
  
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-HTableDescriptor.getValues()
+HColumnDescriptor.getValues()
 Deprecated. 
  
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-HColumnDescriptor.getValues()
+HTableDescriptor.getValues()
 Deprecated. 
  
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-HColumnDescriptor.getValues()
+HTableDescriptor.getValues()
 Deprecated. 
  
 
@@ -183,13 +183,13 @@
 
 
 Bytes
-HTableDescriptor.getValue(Bytes key)
+HColumnDescriptor.getValue(Bytes key)
 Deprecated. 
  
 
 
 Bytes
-HColumnDescriptor.getValue(Bytes key)
+HTableDescriptor.getValue(Bytes key)
 Deprecated. 
  
 
@@ -236,14 +236,14 @@
 
 
 Bytes
-ColumnFamilyDescriptor.getValue(Bytes key) 
-
-
-Bytes
 TableDescriptor.getValue(Bytes key)
 Getter for accessing the metadata associated with the 
key.
 
 
+
+Bytes
+ColumnFamilyDescriptor.getValue(Bytes key) 
+
 
 
 
@@ -255,23 +255,23 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-ColumnFamilyDescriptor.getValues()
-It clone all bytes of all elements.
-
+TableDescriptor.getValues() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-ColumnFamilyDescriptor.getValues()
-It clone all bytes of all elements.
-
+TableDescriptor.getValues() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-TableDescriptor.getValues() 
+ColumnFamilyDescriptor.getValues()
+It clone all bytes of all elements.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map
-TableDescriptor.getValues() 
+ColumnFamilyDescriptor.getValues()
+It clone all bytes of all elements.
+
 
 
 
@@ -284,14 +284,14 @@
 
 
 Bytes
-ColumnFamilyDescriptor.getValue(Bytes key) 
-
-
-Bytes
 TableDescriptor.getValue(Bytes key)
 Getter for accessing the metadata associated with the 
key.
 
 
+
+Bytes
+ColumnFamilyDescriptor.getValue(Bytes key) 
+
 
 TableDescriptorBuilder
 TableDescriptorBuilder.removeValue(Bytes key) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
index f77ee9b..a03450e 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
@@ -112,15 +112,15 @@
 
 
 protected Order
-RawString.order 
+RawBytes.order 
 
 
 protected Order
-RawBytes.order 
+OrderedBytesBase.order 
 
 
 protected Order
-OrderedBytesBase.order 
+RawString.order 
 
 
 
@@ -133,7 +133,7 @@
 
 
 Order
-RawByte.getOrder() 
+RawBytes.getOrder() 
 
 
 Order
@@ -141,66 +141,66 @@
 
 
 Order
-RawFloat.getOrder() 
+RawShort.getOrder() 
 
 
 Order
-PBType.getOrder() 
+TerminatedWrapper.getOrder() 
 
 
 Order
-RawInteger.getOrder() 
+OrderedBytesBase.getOrder() 
 
 
 Order
-DataType.getOrder()
-Retrieve the sort Order imposed by this data type, 
or null when
- natural ordering is not preserved.
-
+RawFloat.getOrder() 
 
 
 Order
-RawLong.getOrder() 
+Union2.getOrder() 
 
 
 Order
-RawShort.getOrder() 
+Struct.getOrder() 
 
 
 Order
-RawString.getOrder() 
+RawInteger.getOrder() 
 
 
 Order
-RawBytes.getOrder() 
+PBType.getOrder() 
 
 
 Order
-Struct.getOrder() 
+Union3.getOrder() 
 
 
 Order
-Union3.getOrder() 
+RawString.getOrder() 
 
 
 Order
-RawDouble.getOrder() 
+RawByte.getOrder() 
 
 
 Order
-Union2.getOrder() 
+Uni

[26/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 9799603..aa94697 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -106,11 +106,8 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface AsyncAdmin
-The asynchronous administrative API for HBase.
- 
- This feature is still under development, so marked as IA.Private. Will change 
to public when
- done. Use it with caution.
+public interface AsyncAdmin
+The asynchronous administrative API for HBase.
 
 Since:
 2.0.0
@@ -969,7 +966,7 @@ public interface 
 
 tableExists
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
 
 Parameters:
 tableName - Table to check.
@@ -985,7 +982,7 @@ public interface 
 
 listTables
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables()
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables()
 List all the userspace tables.
 
 Returns:
@@ -999,7 +996,7 @@ public interface 
 
 listTables
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(boolean includeSysTables)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(boolean includeSysTables)
 List all the tables.
 
 Parameters:
@@ -1015,7 +1012,7 @@ public interface 
 
 listTables
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern,
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern,
 
boolean includeSysTables)
 List all the tables matching the given pattern.
 
@@ -1033,7 +1030,7 @@ public interface 
 
 listTableNames
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTableNames()
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture

[10/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html 
b/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html
index 1c39ac5..041d3ab 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/metrics/class-use/ScanMetrics.html
@@ -120,24 +120,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 AsyncTableResultScanner.scanMetrics 
 
 
-private ScanMetrics
-AsyncClientScanner.scanMetrics 
+protected ScanMetrics
+AbstractClientScanner.scanMetrics 
 
 
 private ScanMetrics
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.scanMetrics 
+AsyncScanSingleRegionRpcRetryingCaller.scanMetrics 
 
 
 protected ScanMetrics
-AbstractClientScanner.scanMetrics 
+ScannerCallable.scanMetrics 
 
 
 private ScanMetrics
-AsyncScanSingleRegionRpcRetryingCaller.scanMetrics 
+AsyncClientScanner.scanMetrics 
 
 
-protected ScanMetrics
-ScannerCallable.scanMetrics 
+private ScanMetrics
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.scanMetrics 
 
 
 
@@ -200,6 +200,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.metrics(ScanMetrics scanMetrics) 
 
 
+void
+AsyncTableResultScanner.onScanMetricsCreated(ScanMetrics scanMetrics) 
+
+
 default void
 ScanResultConsumer.onScanMetricsCreated(ScanMetrics scanMetrics)
 If scan.isScanMetricsEnabled() returns true, 
then this method will be called prior to
@@ -207,10 +211,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  operation.
 
 
-
-void
-AsyncTableResultScanner.onScanMetricsCreated(ScanMetrics scanMetrics) 
-
 
 default void
 RawScanResultConsumer.onScanMetricsCreated(ScanMetrics scanMetrics)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
index b6b1e89..9ec2ecf 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
@@ -533,7 +533,7 @@
 
 AsyncHBaseAdmin
 
-The implementation of AsyncAdmin.
+Just a wrapper of RawAsyncHBaseAdmin.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index a802c2e..a441486 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -543,24 +543,24 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.client.SnapshotType
-org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
-org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
+org.apache.hadoop.hbase.client.RequestController.ReturnCode
 org.apache.hadoop.hbase.client.TableState.State
-org.apache.hadoop.hbase.client.HBaseAdmin.ReplicationState
-org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
 org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
-org.apache.hadoop.hbase.client.RequestController.ReturnCode
+org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
+org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
+org.apache.hadoop.hbase.client.HBaseAdmin.ReplicationState
+org.apache.hadoop.hbase.client.Scan.ReadType
 org.apache.hadoop.hbase.client.RegionLocateType
 org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
-org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
-org.apache.hadoop.hbase.client.CompactType
-org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
 org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.Co

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 33c2a40..f2615f0 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -723,31 +723,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-FastFailInterceptorContext.server 
+AsyncRequestFutureImpl.SingleServerRequestRunnable.server 
 
 
 private ServerName
-AsyncRequestFutureImpl.SingleServerRequestRunnable.server 
+FastFailInterceptorContext.server 
 
 
 private ServerName
-AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.serverName 
+AsyncAdminRequestRetryingCaller.serverName 
 
 
 private ServerName
-AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.serverName 
+ConnectionUtils.ShortCircuitingClusterConnection.serverName 
 
 
 private ServerName
-AsyncAdminRequestRetryingCaller.serverName 
+AsyncServerRequestRpcRetryingCaller.serverName 
 
 
 private ServerName
-AsyncServerRequestRpcRetryingCaller.serverName 
+AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.serverName 
 
 
 private ServerName
-ConnectionUtils.ShortCircuitingClusterConnection.serverName 
+AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.serverName 
 
 
 
@@ -830,7 +830,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-AsyncHBaseAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers) 
+AsyncAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers)
+Clear dead region servers from master.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -839,18 +841,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HBaseAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers) 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
+RawAsyncHBaseAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-AsyncAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers)
-Clear dead region servers from master.
-
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+HBaseAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-RawAsyncHBaseAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers) 
+AsyncHBaseAdmin.clearDeadServers(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List servers) 
 
 
 default http://docs.oracle.com/j

[16/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
index 78b3155..d436b28 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
@@ -516,7 +516,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private static HRegionLocation
-AsyncMetaTableAccessor.getRegionLocation(Result r,
+MetaTableAccessor.getRegionLocation(Result r,
  RegionInfo regionInfo,
  int replicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -525,7 +525,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private static HRegionLocation
-MetaTableAccessor.getRegionLocation(Result r,
+AsyncMetaTableAccessor.getRegionLocation(Result r,
  RegionInfo regionInfo,
  int replicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -969,18 +969,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-AsyncHBaseAdmin.getOnlineRegions(ServerName serverName) 
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
 AsyncAdmin.getOnlineRegions(ServerName serverName)
 Get all the online regions on a region server.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
 RawAsyncHBaseAdmin.getOnlineRegions(ServerName serverName) 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
+AsyncHBaseAdmin.getOnlineRegions(ServerName serverName) 
+
 
 (package private) Pair
 HBaseAdmin.getRegion(byte[] regionName) 
@@ -1013,18 +1013,18 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-AsyncHBaseAdmin.getTableRegions(TableName tableName) 
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
 AsyncAdmin.getTableRegions(TableName tableName)
 Get the regions of a given table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
 RawAsyncHBaseAdmin.getTableRegions(TableName tableName) 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
+AsyncHBaseAdmin.getTableRegions(TableName tableName) 
+
 
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 RegionInfo.parseDelimitedFrom(byte[] bytes,
@@ -1800,15 +1800,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/L

[48/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index ff5c5fd..23502af 100644
--- a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -102,11 +102,8 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface AsyncAdmin
-The asynchronous administrative API for HBase.
- 
- This feature is still under development, so marked as IA.Private. Will change 
to public when
- done. Use it with caution.
+public interface AsyncAdmin
+The asynchronous administrative API for HBase.
 
 Since:
 2.0.0
@@ -965,7 +962,7 @@ public interface 
 
 tableExists
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
 
 Parameters:
 tableName - Table to check.
@@ -981,7 +978,7 @@ public interface 
 
 listTables
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables()
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables()
 List all the userspace tables.
 
 Returns:
@@ -995,7 +992,7 @@ public interface 
 
 listTables
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(boolean includeSysTables)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(boolean includeSysTables)
 List all the tables.
 
 Parameters:
@@ -1011,7 +1008,7 @@ public interface 
 
 listTables
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern,
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern,
 
boolean includeSysTables)
 List all the tables matching the given pattern.
 
@@ -1029,7 +1026,7 @@ public interface 
 
 listTableNames
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTableNames()
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture

[07/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index d4a07ab..fe4b719 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -488,15 +488,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
@@ -506,63 +506,63 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+FirstKeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-PageFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnRangeFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+KeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnRangeFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or 

[02/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRpcController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRpcController.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRpcController.html
index 8fd32d4..c245bf9 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRpcController.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRpcController.html
@@ -123,21 +123,21 @@
 
 
 
-HBaseRpcController
-AsyncClientScanner.OpenScannerResponse.controller 
-
-
 private HBaseRpcController
 RegionAdminServiceCallable.controller 
 
-
+
 private HBaseRpcController
 AsyncScanSingleRegionRpcRetryingCaller.controller 
 
-
+
 protected HBaseRpcController
 AsyncRpcRetryingCaller.controller 
 
+
+HBaseRpcController
+AsyncClientScanner.OpenScannerResponse.controller 
+
 
 private HBaseRpcController
 MasterCallable.rpcController 
@@ -156,13 +156,13 @@
 RegionAdminServiceCallable.getCurrentPayloadCarryingRpcController() 
 
 
-(package private) HBaseRpcController
-MasterCallable.getRpcController() 
-
-
 private HBaseRpcController
 AsyncConnectionImpl.getRpcController() 
 
+
+(package private) HBaseRpcController
+MasterCallable.getRpcController() 
+
 
 
 
@@ -320,17 +320,17 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.start(HBaseRpcController controller,
+AsyncScanSingleRegionRpcRetryingCaller.start(HBaseRpcController controller,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse respWhenOpen)
-Short cut for build().start(HBaseRpcController, 
ScanResponse).
+Now we will also fetch some cells along with the scanner id 
when opening a scanner, so we also
+ need to process the ScanResponse for the open scanner request.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
-AsyncScanSingleRegionRpcRetryingCaller.start(HBaseRpcController controller,
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.start(HBaseRpcController controller,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse respWhenOpen)
-Now we will also fetch some cells along with the scanner id 
when opening a scanner, so we also
- need to process the ScanResponse for the open scanner request.
+Short cut for build().start(HBaseRpcController, 
ScanResponse).
 
 
 
@@ -517,9 +517,9 @@
   
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback callback) 
 
 
-void
-BlockingRpcConnection.sendRequest(Call call,
-   HBaseRpcController pcrc) 
+abstract void
+RpcConnection.sendRequest(Call call,
+   HBaseRpcController hrc) 
 
 
 void
@@ -527,9 +527,9 @@
HBaseRpcController hrc) 
 
 
-abstract void
-RpcConnection.sendRequest(Call call,
-   HBaseRpcController hrc) 
+void
+BlockingRpcConnection.sendRequest(Call call,
+   HBaseRpcController pcrc) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
index 7c967d0..5808ce3 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
@@ -105,16 +105,16 @@
 
 
 
-protected MetricsHBaseServer
-RpcServer.metrics 
+private MetricsHBaseServer
+NettyRpcServerRequestDecoder.metrics 
 
 
 private MetricsHBaseServer
 NettyRpcServerResponseEncoder.metrics 
 
 
-private MetricsHBaseServer
-NettyRpcServerRequestDecoder.metrics 
+protected MetricsHBaseServer
+RpcServer.metrics 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
index eabef95..7ba86da 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
++

[45/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 30331ac..39728e7 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -107,27 +107,27 @@
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cell c) 
+ValueFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cell c) 
+FirstKeyOnlyFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cell ignored) 
+ColumnPrefixFilter.filterCell(Cell cell) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cell c) 
+TimestampsFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cell c) 
+ColumnCountGetFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cell ignored) 
+WhileMatchFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
@@ -135,35 +135,33 @@
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cell c) 
+RandomRowFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cell c) 
+MultiRowRangeFilter.filterCell(Cell ignored) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cell c) 
+InclusiveStopFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
-Deprecated. 
- 
+SingleColumnValueFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cell c) 
+DependentColumnFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cell c) 
+QualifierFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-Filter.filterCell(Cell c)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
@@ -171,85 +169,87 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cell c) 
+ColumnRangeFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cell c) 
+FilterList.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cell c) 
+MultipleColumnPrefixFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cell c) 
+SkipFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cell c) 
+PageFilter.filterCell(Cell ignored) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cell c) 
+PrefixFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cell cell) 
+FamilyFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cell ignored) 
+Filter.filterCell(Cell c)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cell c) 
+FuzzyRowFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cell c) 
+KeyOnlyFilter.filterCell(Cell ignored) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterColumn(Cell cell) 
+ColumnPrefixFilter.filterColumn(Cell cell) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterColumn(Cell cell) 
+MultipleColumnPrefixFilter.filterColumn(Cell cell) 
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c)
+ValueFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell c)
+FirstKeyOnlyFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell c)
+ColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell c)
+TimestampsFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell c)
+ColumnCountGetFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored)
+WhileMatchFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
@@ -261,47 +261,44 @@
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell c)
+RandomRowFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell c)
+MultiRowRangeFilter.filterKeyValue(Cell ignored)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell c)
+InclusiveStopFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell c)
+SingleColumnValueFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell c)
+DependentColumnFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c)
+QualifierFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
 
 Filter.ReturnCod

[33/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Option.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Option.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Option.html
index c395de4..37df0dd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Option.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.Option.html
@@ -141,7 +141,7 @@ the order they are declared.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncHBaseAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
+AsyncAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
 
 
 ClusterStatus
@@ -150,16 +150,16 @@ the order they are declared.
 
 
 
-ClusterStatus
-HBaseAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
+RawAsyncHBaseAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
+ClusterStatus
+HBaseAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-RawAsyncHBaseAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
+AsyncHBaseAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.html
index 48bced7..2b1f776 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterStatus.html
@@ -179,27 +179,27 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncHBaseAdmin.getClusterStatus() 
+AsyncAdmin.getClusterStatus() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncAdmin.getClusterStatus() 
+RawAsyncHBaseAdmin.getClusterStatus() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-RawAsyncHBaseAdmin.getClusterStatus() 
+AsyncHBaseAdmin.getClusterStatus() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncHBaseAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
+AsyncAdmin.getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true";
 title="class or interface in java.util">EnumSet options) 
 
 
 http://docs.oracle.co

[37/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 16a4879..fdfe139 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 ©2007 - 2017 The Apache Software Foundation
 
   File: 3426,
- Errors: 21363,
+ Errors: 21359,
  Warnings: 0,
  Infos: 0
   
@@ -1852,7 +1852,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mob.MobUtils.java";>org/apache/hadoop/hbase/mob/MobUtils.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine.java";>org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
 
 
   0
@@ -1861,12 +1861,12 @@ under the License.
   0
 
 
-  15
+  1
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine.java";>org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mob.MobUtils.java";>org/apache/hadoop/hbase/mob/MobUtils.java
 
 
   0
@@ -1875,7 +1875,7 @@ under the License.
   0
 
 
-  1
+  15
 
   
   
@@ -3700,7 +3700,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.quotas.TestQuotaObserverChore.java";>org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.HFileArchiveTestingUtil.java";>org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
 
 
   0
@@ -3709,12 +3709,12 @@ under the License.
   0
 
 
-  1
+  9
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.HFileArchiveTestingUtil.java";>org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.quotas.TestQuotaObserverChore.java";>org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
 
 
   0
@@ -3723,7 +3723,7 @@ under the License.
   0
 
 
-  9
+  1
 
   
   
@@ -4479,7 +4479,7 @@ under the License.
   0
 
 
-  33
+  32
 
   
   
@@ -4708,7 +4708,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.RegionOfflineException.java";>org/apache/hadoop/hbase/client/RegionOfflineException.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.PendingWatcher.java";>org/apache/hadoop/hbase/zookeeper/PendingWatcher.java
 
 
   0
@@ -4722,7 +4722,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.PendingWatcher.java";>org/apache/hadoop/hbase/zookeeper/PendingWatcher.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.RegionOfflineException.java";>org/apache/hadoop/hbase/client/RegionOfflineException.java
 
 
   0
@@ -5254,7 +5254,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.TestRegionSnapshotTask.java";>org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.TestSwitchToStreamRead.java";>org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java
 
 
   0
@@ -5263,12 +5263,12 @@ under the License.
   0
 
 

[13/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
index 0038ea7..e845e0f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
@@ -214,9 +214,9 @@ service.
 
 
 
-default ResultScanner
-AsyncTable.getScanner(byte[] family)
-Gets a scanner on the current table for the given 
family.
+ResultScanner
+HTable.getScanner(byte[] family)
+The underlying HTable must 
not be closed.
 
 
 
@@ -226,16 +226,16 @@ service.
 
 
 
-ResultScanner
-HTable.getScanner(byte[] family)
-The underlying HTable must 
not be closed.
+default ResultScanner
+AsyncTable.getScanner(byte[] family)
+Gets a scanner on the current table for the given 
family.
 
 
 
-default ResultScanner
-AsyncTable.getScanner(byte[] family,
+ResultScanner
+HTable.getScanner(byte[] family,
   byte[] qualifier)
-Gets a scanner on the current table for the given family 
and qualifier.
+The underlying HTable must 
not be closed.
 
 
 
@@ -246,16 +246,16 @@ service.
 
 
 
-ResultScanner
-HTable.getScanner(byte[] family,
+default ResultScanner
+AsyncTable.getScanner(byte[] family,
   byte[] qualifier)
-The underlying HTable must 
not be closed.
+Gets a scanner on the current table for the given family 
and qualifier.
 
 
 
 ResultScanner
-AsyncTable.getScanner(Scan scan)
-Returns a scanner on the current table as specified by the 
Scan 
object.
+HTable.getScanner(Scan scan)
+The underlying HTable must 
not be closed.
 
 
 
@@ -271,8 +271,8 @@ service.
 
 
 ResultScanner
-HTable.getScanner(Scan scan)
-The underlying HTable must 
not be closed.
+AsyncTable.getScanner(Scan scan)
+Returns a scanner on the current table as specified by the 
Scan 
object.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
index e52b10d..34a7506 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
@@ -106,11 +106,11 @@
 
 
 RetriesExhaustedWithDetailsException
-AsyncRequestFuture.getErrors() 
+AsyncRequestFutureImpl.getErrors() 
 
 
 RetriesExhaustedWithDetailsException
-AsyncRequestFutureImpl.getErrors() 
+AsyncRequestFuture.getErrors() 
 
 
 (package private) RetriesExhaustedWithDetailsException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
index 88f3213..c67d003 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
@@ -234,28 +234,36 @@
 
 
 
+T
+RpcRetryingCallerImpl.callWithoutRetries(RetryingCallable callable,
+  int callTimeout) 
+
+
 T
 RpcRetryingCaller.callWithoutRetries(RetryingCallable callable,
   int callTimeout)
 Call the server once only.
 
 
-
+
 T
-RpcRetryingCallerImpl.callWithoutRetries(RetryingCallable callable,
-  int callTimeout) 
+RpcRetryingCallerImpl.callWithRetries(RetryingCallable callable,
+   int callTimeout) 
 
-
+
 T
 RpcRetryingCaller.callWithRetries(RetryingCallable callable,
int callTimeout)
 Retries if invocation fails.
 
 
+
+RetryingCallerInterceptorContext
+NoOpRetryingInterceptorContext.prepare(RetryingCallable callable) 
+
 
-T
-RpcRetryingCallerImpl.callWithRetries(RetryingCallable callable,
-   int callTimeout) 
+FastFailInterceptorContext
+FastFailInterceptorContext.prepare(RetryingCallable callable) 
 
 
 abstract RetryingCallerInterceptorContext
@@ -267,11 +275,13 @@
 
 
 RetryingCallerInterceptorContext
-NoOpRetryingInterceptorContext.prepare(RetryingCallable callable) 
+NoOpRetryingInterceptorContext.prepare(RetryingCallable callable,
+   int tries) 
 
 
 FastFailInterceptorContext
-FastFailInterceptorContext.prepare(RetryingCallable callable) 
+FastFailInterceptorContext.prepare(RetryingCallable callable,
+  

[15/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
index aec63fb..4345522 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
@@ -106,7 +106,7 @@
 
 
 private RegionLocateType
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.locateType 
+AsyncSingleRequestRpcRetryingCaller.locateType 
 
 
 RegionLocateType
@@ -114,7 +114,7 @@
 
 
 private RegionLocateType
-AsyncSingleRequestRpcRetryingCaller.locateType 
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.locateType 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
index ef64eb1..2dc7051 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
@@ -208,13 +208,13 @@ service.
 
 
 private RegionLocator
-TableInputFormatBase.regionLocator
-The RegionLocator of the 
table.
-
+HFileOutputFormat2.TableInfo.regionLocator 
 
 
 private RegionLocator
-HFileOutputFormat2.TableInfo.regionLocator 
+TableInputFormatBase.regionLocator
+The RegionLocator of the 
table.
+
 
 
 
@@ -226,15 +226,15 @@ service.
 
 
 
+RegionLocator
+HFileOutputFormat2.TableInfo.getRegionLocator() 
+
+
 protected RegionLocator
 TableInputFormatBase.getRegionLocator()
 Allows subclasses to get the RegionLocator.
 
 
-
-RegionLocator
-HFileOutputFormat2.TableInfo.getRegionLocator() 
-
 
 
 



[49/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html 
b/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
index e9a69de..5f6d9d4 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
@@ -197,11 +197,11 @@ the order they are declared.
 
 
 protected CompareOperator
-CompareFilter.op 
+SingleColumnValueFilter.op 
 
 
 protected CompareOperator
-SingleColumnValueFilter.op 
+CompareFilter.op 
 
 
 
@@ -223,11 +223,11 @@ the order they are declared.
 
 
 CompareOperator
-CompareFilter.getCompareOperator() 
+SingleColumnValueFilter.getCompareOperator() 
 
 
 CompareOperator
-SingleColumnValueFilter.getCompareOperator() 
+CompareFilter.getCompareOperator() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 7286958..ff1834a 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -418,38 +418,38 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+AsyncTableRegionLocator.getName()
+Gets the fully qualified table name instance of the table 
whose region we want to locate.
 
 
 
 TableName
-AsyncTableBase.getName()
-Gets the fully qualified table name instance of this 
table.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
 TableName
-RegionLocator.getName()
-Gets the fully qualified table name instance of this 
table.
+AsyncBufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this
+ AsyncBufferedMutator writes to.
 
 
 
 TableName
-AsyncBufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this
- AsyncBufferedMutator writes to.
+Table.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-AsyncTableRegionLocator.getName()
-Gets the fully qualified table name instance of the table 
whose region we want to locate.
+AsyncTableBase.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-Table.getName()
+RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
@@ -465,14 +465,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName
-SnapshotDescription.getTableName() 
-
-
-TableName
 TableDescriptor.getTableName()
 Get the name of the table
 
 
+
+TableName
+SnapshotDescription.getTableName() 
+
 
 TableName
 BufferedMutatorParams.getTableName() 
@@ -846,18 +846,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+default AsyncBufferedMutator
+AsyncConnection.getBufferedMutator(TableName tableName)
+Retrieve an AsyncBufferedMutator for 
performing client-side buffering of writes.
+
+
+
 BufferedMutator
 Connection.getBufferedMutator(TableName tableName)
 
  Retrieve a BufferedMutator for performing 
client-side buffering of writes.
 
 
-
-default AsyncBufferedMutator
-AsyncConnection.getBufferedMutator(TableName tableName)
-Retrieve an AsyncBufferedMutator for 
performing client-side buffering of writes.
-
-
 
 default AsyncBufferedMutator
 AsyncConnection.getBufferedMutator(TableName tableName,
@@ -945,17 +945,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-RegionLocator
-Connection.getRegionLocator(TableName tableName)
-Retrieve a RegionLocator implementation to inspect region 
information on a table.
-
-
-
 AsyncTableRegionLocator
 AsyncConnection.getRegionLocator(TableName tableName)
 Retrieve a AsyncRegionLocator implementation to inspect 
region information on a table.
 
 
+
+RegionLocator
+Connection.getRegionLocator(TableName tableName)
+Retrieve a RegionLocator implementation to inspect region 
information on a table.
+
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 Admin.getRegions(TableName tableName)
@@ -969,31 +969,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-default Table
-Connection.getTable(TableName tableName,
+default AsyncTable
+AsyncConnection.getTable(TableName tableName,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.h

[41/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 3a6add8..e8708c2 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -880,33 +880,33 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.util.Order
-org.apache.hadoop.hbase.MemoryCompactionPolicy
 org.apache.hadoop.hbase.KeepDeletedCells
+org.apache.hadoop.hbase.MemoryCompactionPolicy
 org.apache.hadoop.hbase.CompareOperator
 org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.CellBuilderType
+org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 org.apache.hadoop.hbase.regionserver.BloomType
-org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
 org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.client.SnapshotType
+org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.client.Durability
+org.apache.hadoop.hbase.client.SnapshotType
+org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.CompactType
 org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
-org.apache.hadoop.hbase.client.IsolationLevel
-org.apache.hadoop.hbase.client.RequestController.ReturnCode
-org.apache.hadoop.hbase.client.Scan.ReadType
 org.apache.hadoop.hbase.client.CompactionState
-org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.Scan.ReadType
+org.apache.hadoop.hbase.client.RequestController.ReturnCode
+org.apache.hadoop.hbase.client.IsolationLevel
 org.apache.hadoop.hbase.client.Consistency
-org.apache.hadoop.hbase.client.CompactType
 org.apache.hadoop.hbase.client.security.SecurityCapability
 
 



[35/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 3425d77..f909add 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -529,34 +529,34 @@ service.
 
 
 static Cell
-CellUtil.createCell(Cell cell,
+PrivateCellUtil.createCell(Cell cell,
   byte[] tags) 
 
 
 static Cell
-PrivateCellUtil.createCell(Cell cell,
+CellUtil.createCell(Cell cell,
   byte[] tags) 
 
 
 static Cell
-CellUtil.createCell(Cell cell,
+PrivateCellUtil.createCell(Cell cell,
   byte[] value,
   byte[] tags) 
 
 
 static Cell
-PrivateCellUtil.createCell(Cell cell,
+CellUtil.createCell(Cell cell,
   byte[] value,
   byte[] tags) 
 
 
 static Cell
-CellUtil.createCell(Cell cell,
+PrivateCellUtil.createCell(Cell cell,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List tags) 
 
 
 static Cell
-PrivateCellUtil.createCell(Cell cell,
+CellUtil.createCell(Cell cell,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List tags) 
 
 
@@ -739,16 +739,16 @@ service.
 
 
 static byte[]
+PrivateCellUtil.cloneTags(Cell cell) 
+
+
+static byte[]
 CellUtil.cloneTags(Cell cell)
 Deprecated. 
 As of HBase-2.0. Will be 
removed in HBase-3.0.
 
 
 
-
-static byte[]
-PrivateCellUtil.cloneTags(Cell cell) 
-
 
 static byte[]
 CellUtil.cloneValue(Cell cell) 
@@ -762,6 +762,11 @@ service.
 
 
 int
+CellComparatorImpl.compare(Cell a,
+   Cell b) 
+
+
+int
 KeyValue.MetaComparator.compare(Cell left,
Cell right)
 Deprecated. 
@@ -769,7 +774,7 @@ service.
  table.
 
 
-
+
 int
 KeyValue.KVComparator.compare(Cell left,
Cell right)
@@ -778,11 +783,6 @@ service.
  rowkey, colfam/qual, timestamp, type, mvcc
 
 
-
-int
-CellComparatorImpl.compare(Cell a,
-   Cell b) 
-
 
 int
 CellComparatorImpl.compare(Cell a,
@@ -793,27 +793,27 @@ service.
 
 
 static int
-CellUtil.compare(CellComparator comparator,
+PrivateCellUtil.compare(CellComparator comparator,
Cell left,
byte[] key,
int offset,
int length)
-Deprecated. 
-As of HBase-2.0. Will be 
removed in HBase-3.0
-
+Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
+ the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
+ the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
+ method cannot be used.
 
 
 
 static int
-PrivateCellUtil.compare(CellComparator comparator,
+CellUtil.compare(CellComparator comparator,
Cell left,
byte[] key,
int offset,
int length)
-Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
- the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
- the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
- method cannot be used.
+Deprecated. 
+As of HBase-2.0. Will be 
removed in HBase-3.0
+
 
 
 
@@ -1016,23 +1016,23 @@ service.
 
 
 int
-KeyValue.KVComparator.compareRows(Cell left,
-   Cell right)
-Deprecated. 
- 
-
-
-int
 CellComparatorImpl.compareRows(Cell left,
Cell right)
 Compares the rows of the left and right cell.
 
 
-
+
 int
 CellComparatorImpl.MetaCellComparator.compareRows(Cell left,
Cell right) 
 
+
+int
+KeyValue.KVComparator.compareRows(Cell left,
+   Cell right)
+Deprecated. 
+ 
+
 
 int
 CellComparator.compareTimestamps(Cell leftCell,
@@ -1042,17 +1042,17 @@ service.
 
 
 int
-KeyValue.KVComparator.compareTimestamps(Cell left,
+CellComparatorImpl.compareTimestamps(Cell left,
  Cell right)
-Deprecated. 
- 
+Compares cell's timestamps in DESCENDING order.
+
 
 
 int
-CellComparatorImpl.compareTimestamps(Cell left,
+KeyValue.KVComparator.compareTimestamps(Cell left,
  Cell right)
-Compares cell's timestamps in DESCENDING order.
-
+Deprecated. 
+ 
 
 
 static int
@@ -1239,34 +1239,34 @@ service.
 
 
 static Cell
-CellUtil.createCell(Cell cell,
+PrivateCellUtil.createCell(Cell cell,
   byte[] tags) 
 
 
 static Cell
-PrivateCellUtil.createCell(Cell cell,
+CellUtil.createCell(Cell cell,
   byte[] tags) 
 
 
 static Cell
-CellUtil.createCell(Cell cell,
+PrivateCellUtil.createCell(Cell cell,
   byte[] value,
   byte[] tags) 
 
 
 static Cell
-PrivateCellUtil.createCell(Cell cell,
+CellUtil.createCell(Cell cell,
   byte[] value,
   byte[] tags) 
 
 
 static Cell
-CellUtil.createCell(C

[20/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
index 0d8d8ba..ce7bb8d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
@@ -258,7 +258,7 @@ the order they are declared.
 
 
 values
-public static RequestController.ReturnCode[] values()
+public static RequestController.ReturnCode[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -278,7 +278,7 @@ for (RequestController.ReturnCode c : 
RequestController.ReturnCode.values())
 
 
 valueOf
-public static RequestController.ReturnCode valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static RequestController.ReturnCode valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/SnapshotType.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/SnapshotType.html 
b/devapidocs/org/apache/hadoop/hbase/client/SnapshotType.html
index 65dee1a..e08c2b9 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/SnapshotType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/SnapshotType.html
@@ -246,7 +246,7 @@ the order they are declared.
 
 
 values
-public static SnapshotType[] values()
+public static SnapshotType[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -266,7 +266,7 @@ for (SnapshotType c : SnapshotType.values())
 
 
 valueOf
-public static SnapshotType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static SnapshotType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/TableState.State.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/TableState.State.html 
b/devapidocs/org/apache/hadoop/hbase/client/TableState.State.html
index 650d5d3..129d253 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/TableState.State.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/TableState.State.html
@@ -274,7 +274,7 @@ the order they are declared.
 
 
 values
-public static TableState.State[] values()
+public static TableState.State[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -294,7 +294,7 @@ for (TableState.State c : TableState.State.values())
 
 
 valueOf
-public static TableState.State valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static TableState.State valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ClientBackoffPolicy.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ClientBackoffPolicy.html
 
b/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ClientBackoffPolicy.html
index 05caaf5..37c6d12 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/backoff/class-use/ClientBackoffPol

[40/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index e249fd7..bc012fd 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -54,1064 +54,1061 @@
 046
 047/**
 048 * The asynchronous administrative API 
for HBase.
-049 * 

-050 * This feature is still under development, so marked as IA.Private. Will change to public when -051 * done. Use it with caution. -052 * @since 2.0.0 -053 */ -054@InterfaceAudience.Public -055public interface AsyncAdmin { -056 -057 /** -058 * @param tableName Table to check. -059 * @return True if table exists already. The return value will be wrapped by a -060 * {@link CompletableFuture}. -061 */ -062 CompletableFuture tableExists(TableName tableName); -063 -064 /** -065 * List all the userspace tables. -066 * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}. -067 */ -068 default CompletableFuture> listTables() { -069return listTables(false); -070 } -071 -072 /** -073 * List all the tables. -074 * @param includeSysTables False to match only against userspace tables -075 * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}. -076 */ -077 CompletableFuture> listTables(boolean includeSysTables); -078 -079 /** -080 * List all the tables matching the given pattern. -081 * @param pattern The compiled regular expression to match against -082 * @param includeSysTables False to match only against userspace tables -083 * @return - returns a list of TableDescriptors wrapped by a {@link CompletableFuture}. -084 */ -085 CompletableFuture> listTables(Pattern pattern, boolean includeSysTables); -086 -087 /** -088 * List all of the names of userspace tables. -089 * @return a list of table names wrapped by a {@link CompletableFuture}. -090 * @see #listTableNames(Pattern, boolean) -091 */ -092 default CompletableFuture> listTableNames() { -093return listTableNames(false); -094 } -095 -096 /** -097 * List all of the names of tables. -098 * @param includeSysTables False to match only against userspace tables -099 * @return a list of table names wrapped by a {@link CompletableFuture}. 
-100 */ -101 CompletableFuture> listTableNames(boolean includeSysTables); -102 -103 /** -104 * List all of the names of userspace tables. -105 * @param pattern The regular expression to match against -106 * @param includeSysTables False to match only against userspace tables -107 * @return a list of table names wrapped by a {@link CompletableFuture}. -108 */ -109 CompletableFuture> listTableNames(Pattern pattern, boolean includeSysTables); -110 -111 /** -112 * Method for getting the tableDescriptor -113 * @param tableName as a {@link TableName} -114 * @return the read-only tableDescriptor wrapped by a {@link CompletableFuture}. -115 */ -116 CompletableFuture getTableDescriptor(TableName tableName); -117 -118 /** -119 * Creates a new table. -120 * @param desc table descriptor for table -121 */ -122 CompletableFuture createTable(TableDescriptor desc); -123 -124 /** -125 * Creates a new table with the specified number of regions. The start key specified will become -126 * the end key of the first region of the table, and the end key specified will become the start -127 * key of the last region of the table (the first region has a null start key and the last region -128 * has a null end key). BigInteger math will be used to divide the key range specified into enough -129 * segments to make the required number of total regions. -130 * @param desc table descriptor for table -131 * @param startKey beginning of key range -132 * @param endKey end of key range -133 * @param numRegions the total number of regions to create -134 */ -135 CompletableFuture createTable(TableDescriptor desc, byte[] startKey, byte[] endKey, -136 int numRegions); -137 -138 /** -139 * Creates a new table with an initial set of empty regions defined by the specified split keys. -140 * The total number of regions created will be the number of split keys plus one. -141 * Note : Avoid passing empty split key. 
-142 * @param desc table descriptor for table -143 * @param splitKeys array of split keys for the initial regions of the table -144 */ -145 CompletableFuture createTable(TableDescriptor desc, byte[][] splitKeys); -146 -147 /** -148 * Deletes a table. -


[36/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index cb13ec5..98b6cc7 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -614,13 +614,13 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.coprocessor.ObserverContextImpl.createAndPrepare(E)
@@ -890,82 +890,82 @@
 
 
 
-org.apache.hadoop.hbase.filter.ValueFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.ColumnPrefixFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.SkipFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.ColumnCountGetFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.FamilyFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.RowFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.ColumnPrefixFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.FuzzyRowFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.PageFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.Filter.filterKeyValue(Cell)
+As of release 2.0.0, this 
will be removed in HBase 3.0.0.
+ Instead use filterCell(Cell)
+
 
 
-org.apache.hadoop.hbase.filter.RowFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.RandomRowFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.ColumnRangeFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.ColumnCountGetFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.SkipFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.TimestampsFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.ColumnPaginationFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.ValueFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.DependentColumnFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.KeyOnlyFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.InclusiveStopFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.FamilyFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.KeyOnlyFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.QualifierFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.MultiRowRangeFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.FilterList.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.Filter.filterKeyValue(Cell)
-As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Instead use filterCell(Cell)
-
+org.apache.hadoop.hbase.filter.ColumnRangeFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.ColumnPaginationFilter.filterKeyValue(Cell)
 
 
 org.apache.hadoop.hbase.filter.WhileMatchFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.MultiRowRangeFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.TimestampsFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.PrefixFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.DependentColumnFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.FilterList.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.RandomRowFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.PageFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.PrefixFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.InclusiveStopFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.SingleColumnValueFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter.filterKeyValue(Cell)
 
 
-org.apache.hadoop.hbase.filter.QualifierFilter.filterKeyValue(Cell)
+org.apache.hadoop.hbase.filter.SingleColumnValueFilter.filterKeyValue(Cell)
 
 
 org.apache.hadoop.hbase.filter.FilterWrapper.filterKeyValue(Cell)
@@ -1151,14 +1151,14 @@
 
 

hbase-site git commit: INFRA-10751 Empty commit

2017-11-14 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 6607d33c5 -> 07c67a9cf


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/07c67a9c
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/07c67a9c
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/07c67a9c

Branch: refs/heads/asf-site
Commit: 07c67a9cfa58156b389e4604ac6ff93c29b8d0b4
Parents: 6607d33
Author: jenkins 
Authored: Tue Nov 14 15:20:55 2017 +
Committer: jenkins 
Committed: Tue Nov 14 15:20:55 2017 +

--

--




[29/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
index 9a48004..c95dbba 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
@@ -122,11 +122,11 @@
 
 
 TableDescriptors
-HMaster.getTableDescriptors() 
+MasterServices.getTableDescriptors() 
 
 
 TableDescriptors
-MasterServices.getTableDescriptors() 
+HMaster.getTableDescriptors() 
 
 
 



[23/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
index 74b0bc0..0016c72 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
@@ -274,7 +274,7 @@ the order they are declared.
 
 
 values
-public static AsyncRequestFutureImpl.Retry[] values()
+public static AsyncRequestFutureImpl.Retry[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -294,7 +294,7 @@ for (AsyncRequestFutureImpl.Retry c : 
AsyncRequestFutureImpl.Retry.values())
 
 
 valueOf
-public static AsyncRequestFutureImpl.Retry valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static AsyncRequestFutureImpl.Retry valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/CompactType.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/CompactType.html 
b/devapidocs/org/apache/hadoop/hbase/client/CompactType.html
index 8af2569..7d9f1a6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/CompactType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/CompactType.html
@@ -236,7 +236,7 @@ the order they are declared.
 
 
 values
-public static CompactType[] values()
+public static CompactType[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -256,7 +256,7 @@ for (CompactType c : CompactType.values())
 
 
 valueOf
-public static CompactType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static CompactType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/CompactionState.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/CompactionState.html 
b/devapidocs/org/apache/hadoop/hbase/client/CompactionState.html
index 45059a8..468827d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/CompactionState.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/CompactionState.html
@@ -258,7 +258,7 @@ the order they are declared.
 
 
 values
-public static CompactionState[] values()
+public static CompactionState[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -278,7 +278,7 @@ for (CompactionState c : CompactionState.values())
 
 
 valueOf
-public static CompactionState valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static CompactionState valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/Durability.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Durability.html 
b/devapidocs/org/apache/hadoop/hbase/client/Durability.html
index fc50054..285159f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Durability.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Durability.html
@@ -293,7 +293,7 @@ the order they are declared.
 
 
 values

[04/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
index b357af9..40cc4d2 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
@@ -168,23 +168,23 @@
 
 
 void
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+  Cacheable buf) 
+
+
+void
 BlockCache.cacheBlock(BlockCacheKey cacheKey,
   Cacheable buf)
 Add block to cache (defaults to not in-memory).
 
 
-
+
 void
 LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
   Cacheable buf)
 Cache the block with the specified name and buffer.
 
 
-
-void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
-  Cacheable buf) 
-
 
 void
 MemcachedBlockCache.cacheBlock(BlockCacheKey cacheKey,
@@ -192,35 +192,35 @@
 
 
 void
-BlockCache.cacheBlock(BlockCacheKey cacheKey,
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
   Cacheable buf,
   boolean inMemory,
-  boolean cacheDataInL1)
-Add block to cache.
-
+  boolean cacheDataInL1) 
 
 
 void
-LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
+InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
   Cacheable buf,
   boolean inMemory,
-  boolean cacheDataInL1)
-Cache the block with the specified name and buffer.
-
+  boolean cacheDataInL1) 
 
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+BlockCache.cacheBlock(BlockCacheKey cacheKey,
   Cacheable buf,
   boolean inMemory,
-  boolean cacheDataInL1) 
+  boolean cacheDataInL1)
+Add block to cache.
+
 
 
 void
-InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
   Cacheable buf,
   boolean inMemory,
-  boolean cacheDataInL1) 
+  boolean cacheDataInL1)
+Cache the block with the specified name and buffer.
+
 
 
 void
@@ -237,17 +237,17 @@
 
 
 boolean
-BlockCache.evictBlock(BlockCacheKey cacheKey)
-Evict block from cache.
-
+CombinedBlockCache.evictBlock(BlockCacheKey cacheKey) 
 
 
 boolean
-LruBlockCache.evictBlock(BlockCacheKey cacheKey) 
+BlockCache.evictBlock(BlockCacheKey cacheKey)
+Evict block from cache.
+
 
 
 boolean
-CombinedBlockCache.evictBlock(BlockCacheKey cacheKey) 
+LruBlockCache.evictBlock(BlockCacheKey cacheKey) 
 
 
 boolean
@@ -255,35 +255,35 @@
 
 
 Cacheable
-BlockCache.getBlock(BlockCacheKey cacheKey,
+CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics)
-Fetch block from cache.
-
+boolean updateCacheMetrics) 
 
 
 Cacheable
-LruBlockCache.getBlock(BlockCacheKey cacheKey,
+InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics)
-Get the buffer of the block with the specified name.
-
+boolean updateCacheMetrics) 
 
 
 Cacheable
-CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+BlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics) 
+boolean updateCacheMetrics)
+Fetch block from cache.
+
 
 
 Cacheable
-InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+LruBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics) 
+boolean updateCacheMetrics)
+Get the buffer of the block with the specified name.
+
 
 
 Cacheable
@@ -310,20 +310,20 @@
 
 
 void
+CombinedBlockCache.returnBlock(BlockCacheKey cacheKey,
+   Cacheable block) 
+
+
+void
 BlockCache.returnBlock(BlockCacheKey cacheKey,
Cacheable block)
 Called when the scanner using the block decides to return 
the block once its usage
  is over.
 
 
-
-void
-LruBlockCache.returnBlock(BlockCacheKey cacheKey,
-   Cacheable block) 
-
 
 void
-CombinedBlockCache.returnBlock(BlockCacheKey cacheKey,
+LruBlockCache.returnBlock(BlockCacheKey cacheKey,
Cacheable block) 
 
 
@@ -510,13 +510,13 @@
 
 
 void
-CachedEntryQueue.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true";
 title="class or interface in java.util">Map.Entry entry)
-Attempt to add the specified entry to this queue.
-
+BucketCache.BucketEntryGroup.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true";
 title="class or interface in java.util">Map.Entry block) 
 
 
 void
-BucketCache.BucketEntryGroup.add(http://docs.oracle.co

[50/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index c10ded7..d197835 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1074,17 +1074,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-Append
-Append.add(Cell cell)
-Add column and value to this Append operation.
-
-
-
 Increment
 Increment.add(Cell cell)
 Add the specified KeyValue to this operation.
 
 
+
+Delete
+Delete.add(Cell kv)
+Add an existing delete marker to this Delete object.
+
+
 
 Put
 Put.add(Cell kv)
@@ -1092,9 +1092,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Delete
-Delete.add(Cell kv)
-Add an existing delete marker to this Delete object.
+Append
+Append.add(Cell cell)
+Add column and value to this Append operation.
 
 
 
@@ -1177,13 +1177,13 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
   boolean mayHaveMoreCellsInRow) 
 
 
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Increment
 Increment.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
+
+Delete
+Delete.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
@@ -1195,8 +1195,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Put.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
-Delete
-Delete.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
 
@@ -1214,67 +1214,67 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-FilterList.getNextCellHint(Cell currentCell) 
+ColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
+TimestampsFilter.getNextCellHint(Cell currentCell)
+Pick the next cell that the scanner should seek to.
+
 
 
 Cell
-ColumnRangeFilter.getNextCellHint(Cell cell) 
+MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
 
 
-abstract Cell
-Filter.getNextCellHint(Cell currentCell)
-If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
- the next key it must seek to.
-
+Cell
+ColumnPaginationFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-ColumnPaginationFilter.getNextCellHint(Cell cell) 
+ColumnRangeFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-FuzzyRowFilter.getNextCellHint(Cell currentCell) 
+FilterList.getNextCellHint(Cell currentCell) 
 
 
 Cell
-TimestampsFilter.getNextCellHint(Cell currentCell)
-Pick the next cell that the scanner should seek to.
-
+MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
-Cell
-ColumnPrefixFilter.getNextCellHint(Cell cell) 
+abstract Cell
+Filter.getNextCellHint(Cell currentCell)
+If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
+ the next key it must seek to.
+
 
 
 Ce

[47/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/CompactType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/CompactType.html 
b/apidocs/org/apache/hadoop/hbase/client/CompactType.html
index d8291ba..314211b 100644
--- a/apidocs/org/apache/hadoop/hbase/client/CompactType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/CompactType.html
@@ -236,7 +236,7 @@ the order they are declared.
 
 
 values
-public static CompactType[] values()
+public static CompactType[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -256,7 +256,7 @@ for (CompactType c : CompactType.values())
 
 
 valueOf
-public static CompactType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static CompactType valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/Consistency.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Consistency.html 
b/apidocs/org/apache/hadoop/hbase/client/Consistency.html
index 06e8661..66ca1bb 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Consistency.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Consistency.html
@@ -253,7 +253,7 @@ the order they are declared.
 
 
 values
-public static Consistency[] values()
+public static Consistency[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -273,7 +273,7 @@ for (Consistency c : Consistency.values())
 
 
 valueOf
-public static Consistency valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static Consistency valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/Durability.html
index 917c338..13d923a 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Durability.html
@@ -293,7 +293,7 @@ the order they are declared.
 
 
 values
-public static Durability[] values()
+public static Durability[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -313,7 +313,7 @@ for (Durability c : Durability.values())
 
 
 valueOf
-public static Durability valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static Durability valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html 
b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
index 4b0e702..a581d89 100644
--- a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
+++ b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
@@ -256,7 +256,7 @@ the order they are declared.
 
 
 values
-public static IsolationLevel[] values()
+public static IsolationLevel[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -276,7 +276,7 @@ for (IsolationLevel

[31/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
index 4e35920..d774844 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
@@ -270,32 +270,32 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncHBaseAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
 AsyncAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Get a namespace descriptor by name
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
 RawAsyncHBaseAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-AsyncHBaseAdmin.listNamespaceDescriptors() 
-
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
+AsyncHBaseAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
 AsyncAdmin.listNamespaceDescriptors()
 List available namespace descriptors
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
 RawAsyncHBaseAdmin.listNamespaceDescriptors() 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
+AsyncHBaseAdmin.listNamespaceDescriptors() 
+
 
 
 
@@ -307,7 +307,9 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncHBaseAdmin.createNamespace(NamespaceDescriptor descriptor) 
+AsyncAdmin.createNamespace(NamespaceDescriptor descriptor)
+Create a new namespace.
+
 
 
 void
@@ -316,18 +318,16 @@
 
 
 
-void
-HBaseAdmin.createNamespace(NamespaceDescriptor descriptor) 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+RawAsyncHBaseAdmin.createNamespace(NamespaceDescriptor descriptor) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncAdmin.createNamespace(NamespaceDescriptor descriptor)
-Create a new namespace.
-
+void

[24/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index db3dbe5..8a0bb3c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -114,13 +114,20 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class AsyncHBaseAdmin
+public class AsyncHBaseAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements AsyncAdmin
-The implementation of AsyncAdmin.
+Just a wrapper of RawAsyncHBaseAdmin. The 
difference is that users need to provide a
+ thread pool when constructing this class, and the callback methods registered 
to the returned
+ http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture will be executed in 
this thread pool. So usually it is safe for users
+ to do anything they want in the callbacks without breaking the rpc 
framework.
 
 Since:
 2.0.0
+See Also:
+RawAsyncHBaseAdmin, 
+AsyncConnection.getAdmin(ExecutorService),
 
+AsyncConnection.getAdminBuilder(ExecutorService)
 
 
 
@@ -141,14 +148,10 @@ implements Field and Description
 
 
-private static 
org.apache.commons.logging.Log
-LOG 
-
-
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService
 pool 
 
-
+
 private RawAsyncHBaseAdmin
 rawAdmin 
 
@@ -982,22 +985,13 @@ implements 
-
-
-
-
-LOG
-private static final org.apache.commons.logging.Log LOG
-
-
 
 
 
 
 
 rawAdmin
-private final RawAsyncHBaseAdmin rawAdmin
+private final RawAsyncHBaseAdmin rawAdmin
 
 
 
@@ -1006,7 +1000,7 @@ implements 
 
 pool
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in java.util.concurrent">ExecutorService pool
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in java.util.concurrent">ExecutorService pool
 
 
 
@@ -1023,7 +1017,7 @@ implements 
 
 AsyncHBaseAdmin
-AsyncHBaseAdmin(RawAsyncHBaseAdmin rawAdmin,
+AsyncHBaseAdmin(RawAsyncHBaseAdmin rawAdmin,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
 
 
@@ -1041,7 +1035,7 @@ implements 
 
 wrap
-private  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture wrap(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture future)
+private  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture wrap(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture future)
 
 
 
@@ -1050,7 +1044,7 @@ implements 
 
 tableExists
-public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
+public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean> tableExists(TableName tableName)
 
 Specified by:
 tableExists in
 interface AsyncAdmin
@@ -1068,7 +1062,7 @@ implements 
 
 listTables
-public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList> listTables(boolean includeSysTables)
+public http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";

[22/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
index 8f276a7..84f16a8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer
+private class RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer
 extends RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
 
 
@@ -148,13 +148,6 @@ extends RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
 namespaceName
 
-
-
-
-
-Fields inherited from class org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.ProcedureBiConsumer
-admin
-
 
 
 
@@ -169,8 +162,7 @@ extends Constructor and Description
 
 
-ModifyNamespaceProcedureBiConsumer(AsyncAdmin admin,
-  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String namespaceName) 
+ModifyNamespaceProcedureBiConsumer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String namespaceName) 
 
 
 
@@ -234,14 +226,13 @@ extends 
+
 
 
 
 
 ModifyNamespaceProcedureBiConsumer
-ModifyNamespaceProcedureBiConsumer(AsyncAdmin admin,
-   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String namespaceName)
+ModifyNamespaceProcedureBiConsumer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String namespaceName)
 
 
 
@@ -258,7 +249,7 @@ extends 
 
 getOperationType
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getOperationType()
 
 Specified by:
 getOperationType in
 class RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
index a9b3793..45909d9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract class RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
+private abstract class RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
 extends RawAsyncHBaseAdmin.ProcedureBiConsumer
 
 
@@ -151,13 +151,6 @@ extends namespaceName 
 
 
-
-
-
-
-Fields inherited from class org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.ProcedureBiConsumer
-admin
-
 
 
 
@@ -172,8 +165,7 @@ extends Constructor and Description
 
 
-NamespaceProcedureBiConsumer(AsyncAdmin admin,
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String namespaceName) 
+NamespaceProcedureBiConsumer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String namespaceName) 
 
 
 
@@ -248,7 +240,7 @@ extends 
 
 namespaceName
-protected final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String namespaceName
+protected final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String namespaceName
 
 
 
@@ -259,14 +251,13 @@ extends 
+
 
 
 
 
 NamespaceProcedureBiConsumer
-NamespaceProcedureBiConsumer(AsyncAdmin admin,
- http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String namespaceName)
+NamespaceProcedureBiConsumer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang"

[38/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index e4ace30..9048224 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -289,7 +289,7 @@
 3426
 0
 0
-21363
+21359
 
 Files
 
@@ -702,7 +702,7 @@
 org/apache/hadoop/hbase/PerformanceEvaluation.java
 0
 0
-33
+32
 
 org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
 0
@@ -1617,7 +1617,7 @@
 org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.java
 0
@@ -1972,7 +1972,7 @@
 org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 0
 0
-119
+117
 
 org/apache/hadoop/hbase/client/RawAsyncTable.java
 0
@@ -5267,7 +5267,7 @@
 org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
 0
 0
-3
+4
 
 org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
 0
@@ -6102,7 +6102,7 @@
 org/apache/hadoop/hbase/master/TestMaster.java
 0
 0
-1
+2
 
 org/apache/hadoop/hbase/master/TestMasterFailover.java
 0
@@ -9294,3871 +9294,3866 @@
 0
 1
 
-org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
-0
-0
-2
-
 org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/regionserver/handler/OpenPriorityRegionHandler.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/CompactionScanQueryMatcher.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/DropDeletesCompactionScanQueryMatcher.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/ExplicitColumnTracker.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/IncludeAllCompactionQueryMatcher.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/MajorCompactionScanQueryMatcher.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/MinorCompactionScanQueryMatcher.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/NewVersionBehaviorTracker.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/RawScanQueryMatcher.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/TestNewVersionBehaviorTracker.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/regionserver/querymatcher/UserScanQueryMatcher.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
 0
 0
 19
-
+
 org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/throttle/NoLimitThroughputController.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java
 0
 0
 2
-
+
 or

[27/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
index 7131751..4378d2e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
@@ -104,14 +104,14 @@
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
-
-
-void
 MasterServices.checkTableModifiable(TableName tableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableName tableName) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 1e9a0a9..71c9d33 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -157,14 +157,14 @@
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
-
-
-void
 MasterServices.checkTableModifiable(TableName tableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableName tableName) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
index 6442178..8a26ebd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
@@ -166,18 +166,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Tag
-CellUtil.getTag(Cell cell,
+PrivateCellUtil.getTag(Cell cell,
   byte type)
-Deprecated. 
-As of release 2.0.0, this 
will be removed in HBase 3.0.0.
-
+Retrieve Cell's first tag, matching the passed in type
 
 
 
 static Tag
-PrivateCellUtil.getTag(Cell cell,
+CellUtil.getTag(Cell cell,
   byte type)
-Retrieve Cell's first tag, matching the passed in type
+Deprecated. 
+As of release 2.0.0, this 
will be removed in HBase 3.0.0.
+
 
 
 
@@ -229,17 +229,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+PrivateCellUtil.getTags(Cell cell) 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 CellUtil.getTags(Cell cell)
 Deprecated. 
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
 
 
-
-static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-PrivateCellUtil.getTags(Cell cell) 
-
 
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
+PrivateCellUtil.tagsIterator(byte[] tags,
+int offset,
+int length) 
+
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 CellUtil.tagsIterator(byte[] tags,
 int offset,
@@ -250,12 +256,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-
-private static http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-PrivateCellUtil.tagsIterator(byte[] tags,
-int offset,
-int length) 
-
 
 private static http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 PrivateCellUtil.tagsIterator(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer tags,
@@ -386,12 +386,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Cell
-CellUtil.createCell(Cell cell,
+PrivateCellUtil.createCell(Cell cell,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List tags) 
 
 
 static Cell
-PrivateCellUtil.createCell(Cell cell,
+CellUtil.createCell(Cell cell,
   http://docs.oracle.com/javase/8/d

[39/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/book.html
--
diff --git a/book.html b/book.html
index 614e048..8f56303 100644
--- a/book.html
+++ b/book.html
@@ -35445,7 +35445,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2017-11-12 14:29:36 UTC
+Last updated 2017-11-14 14:32:23 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index 8db77a2..9e415c1 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-11-12
+  Last Published: 
2017-11-14
 
 
 



[34/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/CellBuilder.DataType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/CellBuilder.DataType.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellBuilder.DataType.html
index 2128d5c..80b569a 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellBuilder.DataType.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellBuilder.DataType.html
@@ -125,16 +125,16 @@ the order they are declared.
 
 
 
-CellBuilder
-CellBuilder.setType(CellBuilder.DataType type) 
+ExtendedCellBuilder
+ExtendedCellBuilderImpl.setType(CellBuilder.DataType type) 
 
 
-ExtendedCellBuilder
-ExtendedCellBuilder.setType(CellBuilder.DataType type) 
+CellBuilder
+CellBuilder.setType(CellBuilder.DataType type) 
 
 
 ExtendedCellBuilder
-ExtendedCellBuilderImpl.setType(CellBuilder.DataType type) 
+ExtendedCellBuilder.setType(CellBuilder.DataType type) 
 
 
 private static KeyValue.Type

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 380d3db..e7df486 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -172,27 +172,27 @@
 
 
 static int
-CellUtil.compare(CellComparator comparator,
+PrivateCellUtil.compare(CellComparator comparator,
Cell left,
byte[] key,
int offset,
int length)
-Deprecated. 
-As of HBase-2.0. Will be 
removed in HBase-3.0
-
+Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
+ the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
+ the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
+ method cannot be used.
 
 
 
 static int
-PrivateCellUtil.compare(CellComparator comparator,
+CellUtil.compare(CellComparator comparator,
Cell left,
byte[] key,
int offset,
int length)
-Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
- the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
- the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
- method cannot be used.
+Deprecated. 
+As of HBase-2.0. Will be 
removed in HBase-3.0
+
 
 
 
@@ -265,12 +265,12 @@
 
 
 int
-BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparator comparator,
+RowIndexSeekerV1.compareKey(CellComparator comparator,
   Cell key) 
 
 
 int
-RowIndexSeekerV1.compareKey(CellComparator comparator,
+BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparator comparator,
   Cell key) 
 
 
@@ -282,27 +282,27 @@
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+RowIndexCodecV1.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparator comparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
@@ -340,9 +340,9 @@
 
 
 
-private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+protected CellComparator
+HFileWriterImpl.comparator
+Key comparator.
 
 
 
@@ -356,9 +356,9 @@
 
 
 
-protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
+private CellComparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
@@ -539,15 +539,15 @@
 
 
 private CellComparator
-DefaultStoreFileManager.cellComparator 
+StripeStoreFileManager.cellComparator 
 
 
 private CellComparator
-StripeStoreFileManager.cellComparator 
+DefaultStoreFileManager.cellComparator 
 
 
-private CellComparator
-StoreFileWriter.Builder.comparator 
+protected CellComparator
+StripeMultiF

[51/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/6607d33c
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/6607d33c
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/6607d33c

Branch: refs/heads/asf-site
Commit: 6607d33c5ac6a1fe7914a458892409f6182d51bc
Parents: cd9d576
Author: jenkins 
Authored: Tue Nov 14 15:20:12 2017 +
Committer: jenkins 
Committed: Tue Nov 14 15:20:12 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 6 +-
 apidocs/deprecated-list.html|76 +-
 .../apache/hadoop/hbase/CompareOperator.html| 4 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../hadoop/hbase/MemoryCompactionPolicy.html| 4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   298 +-
 .../hadoop/hbase/class-use/CompareOperator.html | 8 +-
 .../hadoop/hbase/class-use/TableName.html   |80 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   269 +-
 .../apache/hadoop/hbase/client/CompactType.html | 4 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 .../hadoop/hbase/client/IsolationLevel.html | 4 +-
 .../hadoop/hbase/client/MasterSwitchType.html   | 4 +-
 .../hbase/client/MobCompactPartitionPolicy.html | 4 +-
 .../client/RequestController.ReturnCode.html| 4 +-
 .../hadoop/hbase/client/Scan.ReadType.html  | 4 +-
 .../hadoop/hbase/client/SnapshotType.html   | 4 +-
 .../hadoop/hbase/client/class-use/Append.html   | 8 +-
 .../hbase/client/class-use/Consistency.html | 8 +-
 .../hadoop/hbase/client/class-use/Delete.html   |20 +-
 .../hbase/client/class-use/Durability.html  |20 +-
 .../hadoop/hbase/client/class-use/Get.html  |46 +-
 .../hbase/client/class-use/Increment.html   | 8 +-
 .../hbase/client/class-use/IsolationLevel.html  | 8 +-
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hadoop/hbase/client/class-use/Put.html  |24 +-
 .../hadoop/hbase/client/class-use/Result.html   |22 +-
 .../hbase/client/class-use/ResultScanner.html   |26 +-
 .../hadoop/hbase/client/class-use/Row.html  | 8 +-
 .../hbase/client/class-use/RowMutations.html| 8 +-
 .../hadoop/hbase/client/class-use/Scan.html |22 +-
 .../hadoop/hbase/client/package-tree.html   |12 +-
 .../client/security/SecurityCapability.html | 4 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../filter/class-use/ByteArrayComparable.html   | 8 +-
 .../class-use/CompareFilter.CompareOp.html  | 8 +-
 .../filter/class-use/Filter.ReturnCode.html |   114 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |56 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../io/class-use/ImmutableBytesWritable.html|42 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|12 +-
 .../hbase/io/crypto/class-use/Cipher.html   |18 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../mapreduce/class-use/TableRecordReader.html  | 4 +-
 .../org/apache/hadoop/hbase/package-tree.html   | 2 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hbase/quotas/SpaceViolationPolicy.html  | 4 +-
 .../hadoop/hbase/quotas/ThrottleType.html   | 4 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 4 +-
 .../hadoop/hbase/regionserver/BloomType.html| 4 +-
 apidocs/org/apache/hadoop/hbase/util/Order.html | 4 +-
 .../hadoop/hbase/util/class-use/ByteRange.html  |   124 +-
 .../hadoop/hbase/util/class-use/Bytes.html  |48 +-
 .../hadoop/hbase/util/class-use/Order.html  |44 +-
 .../util/class-use/PositionedByteRange.html |   356 +-
 apidocs/overview-tree.html  |22 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |  2113 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 28464 -
 checkstyle.rss  |   262 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html |28 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 8 +-
 devapidocs/constant-values.html |14 +-
 devapidocs/deprecated-list.html |   118 +-
 devapidocs/index-all.html   |42 +-
 .../hadoop/hbase

[32/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 8521855..46e033c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -162,7 +162,7 @@ service.
 
 
 private static HRegionLocation
-AsyncMetaTableAccessor.getRegionLocation(Result r,
+MetaTableAccessor.getRegionLocation(Result r,
  RegionInfo regionInfo,
  int replicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -171,7 +171,7 @@ service.
 
 
 private static HRegionLocation
-MetaTableAccessor.getRegionLocation(Result r,
+AsyncMetaTableAccessor.getRegionLocation(Result r,
  RegionInfo regionInfo,
  int replicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -304,14 +304,6 @@ service.
 HTableMultiplexer.FlushWorker.addr 
 
 
-HRegionLocation
-AsyncClientScanner.OpenScannerResponse.loc 
-
-
-private HRegionLocation
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.loc 
-
-
 private HRegionLocation
 AsyncScanSingleRegionRpcRetryingCaller.loc 
 
@@ -320,15 +312,23 @@ service.
 AsyncBatchRpcRetryingCaller.RegionRequest.loc 
 
 
-protected HRegionLocation
-RegionAdminServiceCallable.location 
+HRegionLocation
+AsyncClientScanner.OpenScannerResponse.loc 
 
 
+private HRegionLocation
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.loc 
+
+
 protected HRegionLocation
 RegionServerCallable.location
 Some subclasses want to set their own location.
 
 
+
+protected HRegionLocation
+RegionAdminServiceCallable.location 
+
 
 
 
@@ -371,11 +371,11 @@ service.
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation() 
+RegionServerCallable.getLocation() 
 
 
 protected HRegionLocation
-RegionServerCallable.getLocation() 
+MultiServerCallable.getLocation() 
 
 
 HRegionLocation
@@ -383,44 +383,44 @@ service.
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row)
+RegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row)
+HRegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row,
+RegionLocator.getRegionLocation(byte[] row,
  boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row,
+HRegionLocator.getRegionLocation(byte[] row,
  boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-ConnectionImplementation.getRegionLocation(TableName tableName,
- byte[] row,
- boolean reload) 
-
-
-HRegionLocation
 ClusterConnection.getRegionLocation(TableName tableName,
  byte[] row,
  boolean reload)
 Find region location hosting passed row
 
 
+
+HRegionLocation
+ConnectionImplementation.getRegionLocation(TableName tableName,
+ byte[] row,
+ boolean reload) 
+
 
 private HRegionLocation
 AsyncRequestFutureImpl.getReplicaLocationOrFail(Action action) 
@@ -434,20 +434,15 @@ service.
 
 
 HRegionLocation
-ConnectionImplementation.locateRegion(byte[] regionName) 
-
-
-HRegionLocation
 ClusterConnection.locateRegion(byte[] regionName)
 Gets the location of the region of regionName.
 
 
-
+
 HRegionLocation
-ConnectionImplementation.locateRegion(TableName tableName,
-byte[] row) 
+ConnectionImplementation.locateRegion(byte[] regionName) 
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(TableName tableName,
 byte[] row)
@@ -455,6 +450,11 @@ service.
  lives in.
 
 
+
+HRegionLocation
+ConnectionImplementation.locateRegion(TableName tableName,
+byte[] row) 
+
 
 private HRegionLocation
 AsyncNonMetaRegionLocator.locateRowBeforeInCache(AsyncNonMetaRegionLocator.TableCache tableCache,
@@ -469,17 +469,17 @@ service.
 
 
 HRegionLocation
-ConnectionImplementation.relocateRegion(TableName tableName,
-  byte[] row) 
-
-
-HRegionLocation
 ClusterConnection.relocateRegion(TableName tableName,
   byte[] row)
 Find the location of the region of tableName that 
row
  lives in, ignoring any value that might be in the cache.
 
 
+
+HRegionLocation
+ConnectionImplementation.relocateRegion(TableName tableName,
+  byte[] row) 
+
 
 
 
@@ -491,14 +491,14 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List

[44/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 841abc0..ec301de 100644
--- a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -175,23 +175,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   
org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,

org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 
@@ -324,9 +324,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReader
-MultiTableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
+TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
   
org.apache.hadoop.mapreduce.TaskAttemptContext context)
-Builds a TableRecordReader.
+Builds a TableRecordReader.
 
 
 
@@ -336,19 +336,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReader
-TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
+MultiTableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
   
org.apache.hadoop.mapreduce.TaskAttemptContext context)
-Builds a TableRecordReader.
+Builds a TableRecordReader.
 
 
 
-org.apache.hadoop.mapreduce.RecordWriter
-MultiTableOutputFormat.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) 
-
-
 org.apache.hadoop.mapreduce.RecordWriter
 HFileOutputFormat2.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) 
 
+
+org.apache.hadoop.mapreduce.RecordWriter
+MultiTableOutputFormat.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) 
+
 
 
 
@@ -375,12 +375,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 int
-SimpleTotalOrderPartitioner.getPartition(ImmutableBytesWritable key,
-VALUE value,
-int reduces) 
-
-
-int
 HRegionPartitioner.getPartition(ImmutableBytesWritable key,
 VALUE value,
 int numPartitions)
@@ -388,6 +382,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  number of partitions i.e.
 
 
+
+int
+SimpleTotalOrderPartitioner.getPartition(ImmutableBytesWritable key,
+VALUE value,
+int reduces) 
+
 
 void
 IdentityTableMapper.map(ImmutableBytesWritable key,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html 
b/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
index 4cd3d3b..dca98a0 100644
--- a/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
+++ b/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
@@ -123,19 +123,19 @@
 
 
 TimeRange
-Get.getTimeRange()
-Method for retrieving the get's TimeRange
+Increment.getTimeRange()
+Gets the TimeRange used for this increment.
 
 
 
 TimeRange
-Increment.getTimeRange()
-Gets the TimeRange used for this increment.
-
+Scan.getTimeRange() 
 
 
 TimeRange
-Scan.getTimeRange() 
+Get.getTimeRange()
+Method for retrieving the get's TimeRange
+
 
 
 

ht

[46/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
index b7ffb1e..4974cf0 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
@@ -109,42 +109,42 @@
 
 
 
-default ResultScanner
-AsyncTable.getScanner(byte[] family)
+ResultScanner
+Table.getScanner(byte[] family)
 Gets a scanner on the current table for the given 
family.
 
 
 
-ResultScanner
-Table.getScanner(byte[] family)
+default ResultScanner
+AsyncTable.getScanner(byte[] family)
 Gets a scanner on the current table for the given 
family.
 
 
 
-default ResultScanner
-AsyncTable.getScanner(byte[] family,
+ResultScanner
+Table.getScanner(byte[] family,
   byte[] qualifier)
 Gets a scanner on the current table for the given family 
and qualifier.
 
 
 
-ResultScanner
-Table.getScanner(byte[] family,
+default ResultScanner
+AsyncTable.getScanner(byte[] family,
   byte[] qualifier)
 Gets a scanner on the current table for the given family 
and qualifier.
 
 
 
 ResultScanner
-AsyncTable.getScanner(Scan scan)
-Returns a scanner on the current table as specified by the 
Scan 
object.
+Table.getScanner(Scan scan)
+Returns a scanner on the current table as specified by the 
Scan
+ object.
 
 
 
 ResultScanner
-Table.getScanner(Scan scan)
-Returns a scanner on the current table as specified by the 
Scan
- object.
+AsyncTable.getScanner(Scan scan)
+Returns a scanner on the current table as specified by the 
Scan 
object.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/class-use/Row.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Row.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Row.html
index 9de93fc..fc04c78 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Row.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Row.html
@@ -179,19 +179,19 @@
 
 
 int
-Get.compareTo(Row other) 
+Increment.compareTo(Row i) 
 
 
 int
-Increment.compareTo(Row i) 
+Mutation.compareTo(Row d) 
 
 
 int
-Mutation.compareTo(Row d) 
+RowMutations.compareTo(Row i) 
 
 
 int
-RowMutations.compareTo(Row i) 
+Get.compareTo(Row other) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
index 969626d..61c3609 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
@@ -134,14 +134,14 @@
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncTableBase.mutateRow(RowMutations mutation)
+void
+Table.mutateRow(RowMutations rm)
 Performs multiple mutations atomically on a single 
row.
 
 
 
-void
-Table.mutateRow(RowMutations rm)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncTableBase.mutateRow(RowMutations mutation)
 Performs multiple mutations atomically on a single 
row.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Scan.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
index 9a9c509..b72d92f 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
@@ -406,15 +406,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ResultScanner
-AsyncTable.getScanner(Scan scan)
-Returns a scanner on the current table as specified by the 
Scan 
object.
+Table.getScanner(Scan scan)
+Returns a scanner on the current table as specified by the 
Scan
+ object.
 
 
 
 ResultScanner
-Table.getScanner(Scan scan)
-Returns a scanner on the current table as specified by the 
Scan
-

[28/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 74b441f..96bbdee 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2026,119 +2026,119 @@ service.
 
 
 private TableName
-RegionCoprocessorRpcChannel.table 
+SnapshotDescription.table 
 
 
 private TableName
-SnapshotDescription.table 
+RegionCoprocessorRpcChannel.table 
 
 
 private TableName
-HRegionLocator.tableName 
+RawAsyncTableImpl.tableName 
 
 
 private TableName
-ScannerCallableWithReplicas.tableName 
+RegionServerCallable.tableName 
 
 
 protected TableName
-ClientScanner.tableName 
+RegionAdminServiceCallable.tableName 
 
 
 private TableName
-AsyncClientScanner.tableName 
+BufferedMutatorImpl.tableName 
 
 
 private TableName
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName 
+AsyncProcessTask.tableName 
 
 
 private TableName
-AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName 
+AsyncProcessTask.Builder.tableName 
 
 
 private TableName
-RegionInfoBuilder.tableName 
+AsyncRequestFutureImpl.tableName 
 
 
-private TableName
-RegionInfoBuilder.MutableRegionInfo.tableName 
+protected TableName
+TableBuilderBase.tableName 
 
 
 private TableName
-RawAsyncTableImpl.tableName 
+AsyncBatchRpcRetryingCaller.tableName 
 
 
 private TableName
-RegionCoprocessorRpcChannelImpl.tableName 
+RegionInfoBuilder.tableName 
 
 
 private TableName
-AsyncTableRegionLocatorImpl.tableName 
+RegionInfoBuilder.MutableRegionInfo.tableName 
 
 
-protected TableName
-RegionAdminServiceCallable.tableName 
+private TableName
+HTable.tableName 
 
 
 private TableName
-HTable.tableName 
+TableState.tableName 
 
 
-private TableName
-BufferedMutatorImpl.tableName 
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName 
 
 
-private TableName
-AsyncBatchRpcRetryingCaller.tableName 
+protected TableName
+AsyncTableBuilderBase.tableName 
 
 
 private TableName
-BufferedMutatorParams.tableName 
+AsyncSingleRequestRpcRetryingCaller.tableName 
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName 
+ScannerCallableWithReplicas.tableName 
 
 
-private TableName
-AsyncRequestFutureImpl.tableName 
+protected TableName
+RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName 
 
 
 private TableName
-AsyncProcessTask.tableName 
+AsyncTableRegionLocatorImpl.tableName 
 
 
 private TableName
-AsyncProcessTask.Builder.tableName 
+HBaseAdmin.TableFuture.tableName 
 
 
-protected TableName
-RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName 
+private TableName
+RegionCoprocessorRpcChannelImpl.tableName 
 
 
-private TableName
-RegionServerCallable.tableName 
+protected TableName
+ClientScanner.tableName 
 
 
 private TableName
-AsyncSingleRequestRpcRetryingCaller.tableName 
+BufferedMutatorParams.tableName 
 
 
-protected TableName
-TableBuilderBase.tableName 
+private TableName
+AsyncClientScanner.tableName 
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName 
+private TableName
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName 
 
 
-protected TableName
-AsyncTableBuilderBase.tableName 
+private TableName
+AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName 
 
 
 private TableName
-TableState.tableName 
+HRegionLocator.tableName 
 
 
 
@@ -2180,83 +2180,83 @@ service.
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
-
+RawAsyncTableImpl.getName() 
 
 
 TableName
-HRegionLocator.getName() 
+RegionLocator.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-AsyncTableRegionLocator.getName()
-Gets the fully qualified table name instance of the table 
whose region we want to locate.
-
+BufferedMutatorImpl.getName() 
 
 
 TableName
-AsyncTableImpl.getName() 
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+
 
 
 TableName
-RawAsyncTableImpl.getName() 
+HTable.getName() 
 
 
 TableName
-AsyncTableRegionLocatorImpl.getName() 
+AsyncBufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this
+ AsyncBufferedMutator writes to.
+
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+Table.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-RegionLocator.getName()
-Gets the fully qualified table name instance of this 
table.
-
+AsyncTableImpl.getName() 
 
 
 TableName
-AsyncBufferedMutatorImpl.getName() 
+AsyncTableRegionLocatorImpl.getName() 
 
 
 TableName
-HTable.getName() 
+AsyncTableRegionLocator.getName()
+Gets the fully qualified

[17/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
index 306cbf1..38bcde1 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
@@ -261,13 +261,13 @@ the order they are declared.
 
 
 boolean
-HMaster.isSplitOrMergeEnabled(MasterSwitchType switchType)
-Queries the state of the SplitOrMergeTracker.
-
+MasterServices.isSplitOrMergeEnabled(MasterSwitchType switchType) 
 
 
 boolean
-MasterServices.isSplitOrMergeEnabled(MasterSwitchType switchType) 
+HMaster.isSplitOrMergeEnabled(MasterSwitchType switchType)
+Queries the state of the SplitOrMergeTracker.
+
 
 
 void

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.html
index fb157d5..d1031db 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.html
@@ -112,11 +112,11 @@
 
 
 private MetricsConnection
-MetaCache.metrics 
+ConnectionImplementation.metrics 
 
 
 private MetricsConnection
-ConnectionImplementation.metrics 
+MetaCache.metrics 
 
 
 
@@ -129,11 +129,11 @@
 
 
 MetricsConnection
-ConnectionImplementation.getConnectionMetrics() 
+ClusterConnection.getConnectionMetrics() 
 
 
 MetricsConnection
-ClusterConnection.getConnectionMetrics() 
+ConnectionImplementation.getConnectionMetrics() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
index 81ed4c6..3897b40 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
@@ -340,24 +340,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
+BufferedMutatorImpl.mutate(Mutation m) 
+
+
+void
 BufferedMutator.mutate(Mutation mutation)
 Sends a Mutation to 
the table.
 
 
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncBufferedMutatorImpl.mutate(Mutation mutation) 
-
 
-void
-BufferedMutatorImpl.mutate(Mutation m) 
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncBufferedMutator.mutate(Mutation mutation)
 Sends a Mutation to 
the table.
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncBufferedMutatorImpl.mutate(Mutation mutation) 
+
 
 
 
@@ -369,24 +369,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
+BufferedMutatorImpl.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List ms) 
+
+
+void
 BufferedMutator.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List mutations)
 Send some Mutations to 
the table.
 
 
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListCompletableFutureVoid>>
-AsyncBufferedMutatorImpl.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.h

[21/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index da5c15f..8185965 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -114,13 +114,22 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RawAsyncHBaseAdmin
+public class RawAsyncHBaseAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements AsyncAdmin
-The implementation of AsyncAdmin.
+The implementation of AsyncAdmin.
+ 
+ The word 'Raw' means that this is a low level class. The returned http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture will
+ be finished inside the rpc framework thread, which means that the callbacks 
registered to the
+ http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture will also be executed 
inside the rpc framework thread. So users who use
+ this class should not try to do time consuming tasks in the callbacks.
 
 Since:
 2.0.0
+See Also:
+AsyncHBaseAdmin, 
+AsyncConnection.getAdmin(),
 
+AsyncConnection.getAdminBuilder()
 
 
 
@@ -268,10 +277,14 @@ implements pauseNs 
 
 
+private 
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
+retryTimer 
+
+
 private long
 rpcTimeoutNs 
 
-
+
 private int
 startLogErrorsCnt 
 
@@ -290,7 +303,8 @@ implements Constructor and Description
 
 
-RawAsyncHBaseAdmin(AsyncConnectionImpl connection,
+RawAsyncHBaseAdmin(AsyncConnectionImpl connection,
+  
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
   AsyncAdminBuilderBase builder) 
 
 
@@ -1310,7 +1324,7 @@ implements 
 
 FLUSH_TABLE_PROCEDURE_SIGNATURE
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String FLUSH_TABLE_PROCEDURE_SIGNATURE
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String FLUSH_TABLE_PROCEDURE_SIGNATURE
 
 See Also:
 Constant
 Field Values
@@ -1323,7 +1337,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -1332,7 +1346,16 @@ implements 
 
 connection
-private final AsyncConnectionImpl connection
+private final AsyncConnectionImpl connection
+
+
+
+
+
+
+
+retryTimer
+private 
final org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer
 
 
 
@@ -1341,7 +1364,7 @@ implements 
 
 metaTable
-private final RawAsyncTable metaTable
+private final RawAsyncTable metaTable
 
 
 
@@ -1350,7 +1373,7 @@ implements 
 
 rpcTimeoutNs
-private final long rpcTimeoutNs
+private final long rpcTimeoutNs
 
 
 
@@ -1359,7 +1382,7 @@ implements 
 
 operationTimeoutNs
-private final long operationTimeoutNs
+private final long operationTimeoutNs
 
 
 
@@ -1368,7 +1391,7 @@ implements 
 
 pauseNs
-private final long pauseNs
+private final long pauseNs
 
 
 
@@ -1377,7 +1400,7 @@ implements 
 
 maxAttempts
-private final int maxAttempts
+private final int maxAttempts
 
 
 
@@ -1386,7 +1409,7 @@ implements 
 
 startLogErrorsCnt
-private final int startLogErrorsCnt
+private final int startLogErrorsCnt
 
 
 
@@ -1395,7 +1418,7 @@ implements 
 
 ng
-private final NonceGenerator ng
+private final NonceGenerator ng
 
 
 
@@ -1406,13 +1429,14 @@ implements 
+
 
 
 
 
 RawAsyncHBaseAdmin
-RawAsyncHBaseAdmin(AsyncConnectionImpl connection,
+RawAsyncHBaseAdmin(AsyncConnectionImpl connection,
+   
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer,
AsyncAdminBuilderBase builder)
 
 
@@ -1430,7 +1454,7 @@ implements 
 
 newMasterCaller
-private  AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder newMasterCaller()
+private  AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder newMasterCaller()
 
 
 
@@ -1439,7 +1463,7 @@ implements 
 
 newAdminCaller
-private  AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder newAdminCaller()
+private  AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder newAdminCaller()
 
 
 
@@ -1450,7 +1474,7 @@ implements 
 
 call
-private  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.conc

[18/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
index 00eec75..0e78a43 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
@@ -810,23 +810,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private Connection
-RestoreTablesClient.conn 
+BackupAdminImpl.conn 
 
 
-protected Connection
-TableBackupClient.conn 
-
-
 (package private) Connection
 BackupCommands.Command.conn 
 
+
+private Connection
+RestoreTablesClient.conn 
+
 
 protected Connection
-BackupManager.conn 
+TableBackupClient.conn 
 
 
-private Connection
-BackupAdminImpl.conn 
+protected Connection
+BackupManager.conn 
 
 
 private Connection
@@ -1140,13 +1140,13 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-(package private) Connection
-ConnectionImplementation.MasterServiceState.connection 
-
-
 private Connection
 RegionServerCallable.connection 
 
+
+(package private) Connection
+ConnectionImplementation.MasterServiceState.connection 
+
 
 
 
@@ -1191,20 +1191,20 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-Connection
-Admin.getConnection() 
-
-
 (package private) Connection
 RegionAdminServiceCallable.getConnection() 
 
-
+
 protected Connection
 HTable.getConnection()
 INTERNAL Used by unit tests and tools to do 
low-level
  manipulations.
 
 
+
+Connection
+Admin.getConnection() 
+
 
 Connection
 HBaseAdmin.getConnection() 
@@ -1500,11 +1500,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-TableInputFormatBase.connection 
+HRegionPartitioner.connection 
 
 
 private Connection
-HRegionPartitioner.connection 
+TableInputFormatBase.connection 
 
 
 
@@ -1537,22 +1537,22 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-private Connection
-TableOutputFormat.TableRecordWriter.connection 
-
-
 (package private) Connection
 MultiTableOutputFormat.MultiTableRecordWriter.connection 
 
+
+private Connection
+HRegionPartitioner.connection 
+
 
 private Connection
-TableInputFormatBase.connection
-The underlying Connection 
of the table.
-
+TableOutputFormat.TableRecordWriter.connection 
 
 
 private Connection
-HRegionPartitioner.connection 
+TableInputFormatBase.connection
+The underlying Connection 
of the table.
+
 
 
 (package private) Connection
@@ -1637,7 +1637,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private Connection
-MasterCoprocessorHost.MasterEnvironment.connection 
+RegionPlacementMaintainer.connection 
 
 
 private Connection
@@ -1645,11 +1645,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-SnapshotOfRegionAssignmentFromMeta.connection 
+MasterCoprocessorHost.MasterEnvironment.connection 
 
 
 private Connection
-RegionPlacementMaintainer.connection 
+SnapshotOfRegionAssignmentFromMeta.connection 
 
 
 
@@ -1782,31 +1782,31 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-TableQuotaSnapshotStore.conn 
+QuotaObserverChore.conn 
 
 
 private Connection
-SpaceQuotaRefresherChore.conn 
+QuotaObserverChore.TablesWithQuotas.conn 
 
 
 private Connection
-NamespaceQuotaSnapshotStore.conn 
+SnapshotQuotaObserverChore.conn 
 
 
 private Connection
-SnapshotQuotaObserverChore.conn 
+NamespaceQuotaSnapshotStore.conn 
 
 
 private Connection
-QuotaObserverChore.conn 
+TableQuotaSnapshotStore.conn 
 
 
 private Connection
-QuotaObserverChore.TablesWithQuotas.conn 
+TableSpaceQuotaSnapshotNotifier.conn 
 
 
 private Connection
-TableSpaceQuotaSnapshotNotifier.conn 
+SpaceQuotaRefresherChore.conn 
 
 
 private Connection
@@ -2148,11 +2148,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Connection
-RegionCoprocessorHost.RegionEnvironment.getConnection() 
+HRegionServer.getConnection() 
 
 
 Connection
-HRegionServer.getConnection() 
+RegionCoprocessorHost.RegionEnvironment.getConnection() 
 
 
 
@@ -2204,11 +2204,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-HFileReplicator.connection 
+ReplicationSourceManager.connection 
 
 
 private Connection
-ReplicationSourceManager.connection 
+HFileReplicator.connection 
 
 
 private Connection
@@ -2224,13 +2224,13 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-private Connection
-ReplicationSink.getConnection() 
-
-
 Connection
 ReplicationSourceManager.getConnection() 
 
+
+private Connection
+ReplicationSink.getCon

[42/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 06e9c8f..bfce4fe 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -125,104 +125,104 @@
 
 
 byte[]
-OrderedBlobVar.decode(PositionedByteRange src) 
+RawBytes.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
-OrderedNumeric.decode(PositionedByteRange src) 
+T
+FixedLengthWrapper.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-RawByte.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
+RawShort.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRange src) 
+T
+TerminatedWrapper.decode(PositionedByteRange src) 
 
 
-T
-FixedLengthWrapper.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
+OrderedFloat32.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+OrderedFloat64.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
 RawFloat.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
+OrderedInt8.decode(PositionedByteRange src) 
 
 
-T
-DataType.decode(PositionedByteRange src)
-Read an instance of T from the buffer 
src.
-
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
+Struct.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-RawLong.decode(PositionedByteRange src) 
+byte[]
+OrderedBlob.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
+RawInteger.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
+OrderedInt16.decode(PositionedByteRange src) 
 
 
-byte[]
-RawBytes.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+RawString.decode(PositionedByteRange src) 
 
 
 byte[]
-OrderedBlob.decode(PositionedByteRange src) 
+OrderedBlobVar.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
-Struct.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
-RawDouble.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface 

[05/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
index a694809..2df0d13 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
@@ -166,27 +166,27 @@
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+RowIndexCodecV1.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparator comparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
@@ -198,13 +198,13 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
-   HFileBlockDecodingContext blkDecodingCtx) 
+RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+   HFileBlockDecodingContext decodingCtx) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
-   HFileBlockDecodingContext decodingCtx) 
+BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+   HFileBlockDecodingContext blkDecodingCtx) 
 
 
 
@@ -279,17 +279,17 @@
 
 
 HFileBlockDecodingContext
-NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta) 
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext) 
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
-create a encoder specific decoding context for 
reading.
-
+NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta) 
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext) 
+HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
+create a encoder specific decoding context for 
reading.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
index 337ccf5..9f3340f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
@@ -116,36 +116,36 @@
  HFileBlockDefaultDecodingContext decodingCtx) 
 
 
-protected abstract http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+protected http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
+Copy

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
index d9fb34d..4e9ad44 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
@@ -283,27 +283,27 @@ service.
 
 
 private Scan
-ScannerCallableWithReplicas.scan 
+AsyncScanSingleRegionRpcRetryingCaller.scan 
 
 
 protected Scan
-ClientScanner.scan 
+ScannerCallable.scan 
 
 
 private Scan
-AsyncClientScanner.scan 
+ScannerCallableWithReplicas.scan 
 
 
-private Scan
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.scan 
+protected Scan
+ClientScanner.scan 
 
 
 private Scan
-AsyncScanSingleRegionRpcRetryingCaller.scan 
+AsyncClientScanner.scan 
 
 
-protected Scan
-ScannerCallable.scan 
+private Scan
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.scan 
 
 
 private Scan
@@ -339,11 +339,11 @@ service.
 
 
 protected Scan
-ClientScanner.getScan() 
+ScannerCallable.getScan() 
 
 
 protected Scan
-ScannerCallable.getScan() 
+ClientScanner.getScan() 
 
 
 Scan
@@ -638,8 +638,8 @@ service.
 
 
 ResultScanner
-AsyncTable.getScanner(Scan scan)
-Returns a scanner on the current table as specified by the 
Scan 
object.
+HTable.getScanner(Scan scan)
+The underlying HTable must 
not be closed.
 
 
 
@@ -655,8 +655,8 @@ service.
 
 
 ResultScanner
-HTable.getScanner(Scan scan)
-The underlying HTable must 
not be closed.
+AsyncTable.getScanner(Scan scan)
+Returns a scanner on the current table as specified by the 
Scan 
object.
 
 
 
@@ -689,16 +689,16 @@ service.
 
 
 void
+AsyncTableImpl.scan(Scan scan,
+ScanResultConsumer consumer) 
+
+
+void
 AsyncTable.scan(Scan scan,
 ScanResultConsumer consumer)
 The scan API uses the observer pattern.
 
 
-
-void
-AsyncTableImpl.scan(Scan scan,
-ScanResultConsumer consumer) 
-
 
 private void
 AsyncTableImpl.scan0(Scan scan,
@@ -706,11 +706,11 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-AsyncTableImpl.scanAll(Scan scan) 
+RawAsyncTableImpl.scanAll(Scan scan) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
-RawAsyncTableImpl.scanAll(Scan scan) 
+AsyncTableImpl.scanAll(Scan scan) 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
@@ -1311,17 +1311,17 @@ service.
 
 
 private Scan
-TableInputFormatBase.scan
-Holds the details for the internal scanner.
-
+TableSnapshotInputFormatImpl.RecordReader.scan 
 
 
 private Scan
-TableSnapshotInputFormatImpl.RecordReader.scan 
+TableRecordReaderImpl.scan 
 
 
 private Scan
-TableRecordReaderImpl.scan 
+TableInputFormatBase.scan
+Holds the details for the internal scanner.
+
 
 
 
@@ -1371,14 +1371,14 @@ service.
 
 
 Scan
-TableInputFormatBase.getScan()
-Gets the scan defining the actual details like columns 
etc.
+TableSplit.getScan()
+Returns a Scan object from the stored string 
representation.
 
 
 
 Scan
-TableSplit.getScan()
-Returns a Scan object from the stored string 
representation.
+TableInputFormatBase.getScan()
+Gets the scan defining the actual details like columns 
etc.
 
 
 
@@ -1624,13 +1624,13 @@ service.
 
 
 void
-TableInputFormatBase.setScan(Scan scan)
+TableRecordReaderImpl.setScan(Scan scan)
 Sets the scan defining the actual details like columns 
etc.
 
 
 
 void
-TableRecordReaderImpl.setScan(Scan scan)
+TableInputFormatBase.setScan(Scan scan)
 Sets the scan defining the actual details like columns 
etc.
 
 
@@ -1697,12 +1697,6 @@ service.
 
 
 
-static void
-MultiTableSnapshotInputFormat.setInput(org.apache.hadoop.conf.Configuration configuration,
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection> snapshotScans,
-

[03/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
index 59ee24f..8108f0a 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
@@ -150,15 +150,15 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-BlockCache.iterator() 
+CombinedBlockCache.iterator() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-LruBlockCache.iterator() 
+BlockCache.iterator() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-CombinedBlockCache.iterator() 
+LruBlockCache.iterator() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index 7d29f77..a8e355e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -143,17 +143,17 @@
 
 
 void
-NoOpDataBlockEncoder.saveMetadata(HFile.Writer writer) 
+HFileDataBlockEncoderImpl.saveMetadata(HFile.Writer writer) 
 
 
 void
-HFileDataBlockEncoder.saveMetadata(HFile.Writer writer)
-Save metadata in HFile which will be written to disk
-
+NoOpDataBlockEncoder.saveMetadata(HFile.Writer writer) 
 
 
 void
-HFileDataBlockEncoderImpl.saveMetadata(HFile.Writer writer) 
+HFileDataBlockEncoder.saveMetadata(HFile.Writer writer)
+Save metadata in HFile which will be written to disk
+
 
 
 
@@ -203,18 +203,18 @@
 
 
 
-abstract void
-BloomContext.addLastBloomKey(HFile.Writer writer)
-Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
-
+void
+RowColBloomContext.addLastBloomKey(HFile.Writer writer) 
 
 
 void
 RowBloomContext.addLastBloomKey(HFile.Writer writer) 
 
 
-void
-RowColBloomContext.addLastBloomKey(HFile.Writer writer) 
+abstract void
+BloomContext.addLastBloomKey(HFile.Writer writer)
+Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
+
 
 
 static BloomFilterWriter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
index ded8848..6b0eb0b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
@@ -106,15 +106,15 @@
 
 
 
-private HFileBlock.Writer
-HFileBlockIndex.BlockIndexWriter.blockWriter 
-
-
 protected HFileBlock.Writer
 HFileWriterImpl.blockWriter
 block writer
 
 
+
+private HFileBlock.Writer
+HFileBlockIndex.BlockIndexWriter.blockWriter 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index eff6d3c..2af7d60 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -136,15 +136,15 @@
 
 
 HFileContext
-HFileBlockEncodingContext.getHFileContext() 
+HFileBlockDecodingContext.getHFileContext() 
 
 
 HFileContext
-HFileBlockDecodingContext.getHFileContext() 
+HFileBlockDefaultDecodingContext.getHFileContext() 
 
 
 HFileContext
-HFileBlockDefaultDecodingContext.getHFileContext() 
+HFileBlockEncodingContext.getHFileContext() 
 
 
 HFileContext
@@ -224,24 +224,24 @@
 
 
 private HFileContext
-HFile.WriterFactory.fileContext 
-
-
-private HFileContext
 HFileBlock.fileContext
 Meta data that holds meta information on the 
hfileblock.
 
 
-
+
 private HFileContext
 HFileBlock.Writer

[19/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncServerRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncServerRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncServerRequestRpcRetryingCaller.Callable.html
index 1f0ac88..115843e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncServerRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncServerRequestRpcRetryingCaller.Callable.html
@@ -105,13 +105,13 @@
 
 
 
-private AsyncServerRequestRpcRetryingCaller.Callable
-AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.callable 
-
-
 private AsyncServerRequestRpcRetryingCaller.Callable
 AsyncServerRequestRpcRetryingCaller.callable 
 
+
+private AsyncServerRequestRpcRetryingCaller.Callable
+AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.callable 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncSingleRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncSingleRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncSingleRequestRpcRetryingCaller.Callable.html
index 0b0bb88..e3be081 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncSingleRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncSingleRequestRpcRetryingCaller.Callable.html
@@ -105,13 +105,13 @@
 
 
 
-private AsyncSingleRequestRpcRetryingCaller.Callable
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.callable 
-
-
 private AsyncSingleRequestRpcRetryingCaller.Callable
 AsyncSingleRequestRpcRetryingCaller.callable 
 
+
+private AsyncSingleRequestRpcRetryingCaller.Callable
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.callable 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTable.html
index 1dce44c..a4ea15f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTable.html
@@ -137,16 +137,16 @@
 
 
 AsyncTableBuilder
+AsyncConnectionImpl.getTableBuilder(TableName tableName,
+   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool) 
+
+
+AsyncTableBuilder
 AsyncConnection.getTableBuilder(TableName tableName,
http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
 Returns an AsyncTableBuilder for creating 
AsyncTable.
 
 
-
-AsyncTableBuilder
-AsyncConnectionImpl.getTableBuilder(TableName tableName,
-   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool) 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableBase.CheckAndMutateBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableBase.CheckAndMutateBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableBase.CheckAndMutateBuilder.html
index 429f3c1..00712e0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableBase.CheckAndMutateBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableBase.CheckAndMutateBuilder.html
@@ -119,12 +119,12 @@
 
 
 AsyncTableBase.CheckAndMutateBuilder
-AsyncTableImpl.checkAndMutate(byte[] row,
+RawAsyncTableImpl.checkAndMutate(byte[] row,
   byte[] family) 
 
 
 AsyncTableBase.CheckAndMutateBuilder
-RawAsyncTableImpl.checkAndMutate(byte[] row,
+AsyncTableImpl.checkAndMutate(byte[] row,
   byte[] family) 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableBuil

[06/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 67eb2fa..53b63c6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
+TableRecordReader.createKey() 
 
 
 ImmutableBytesWritable
@@ -183,11 +183,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -197,9 +195,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   
org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -218,12 +218,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -236,19 +234,21 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -281,12 +281,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -299,10 +297,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -349,7 +349,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key 
+TableRecordReaderImpl.key 
 
 
 private ImmutableBytesWritable
@@ -357,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key 
+MultithreadedTableMapper.SubMapRecordReader.key 
 
 
 (package private) ImmutableBytesWritable
@@ -427,33 +427,33 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-Multit

[01/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site cd9d57650 -> 6607d33c5


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
index 71724c6..f37c444 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
@@ -255,7 +255,7 @@
 
 
 protected void
-UnassignProcedure.finishTransition(MasterProcedureEnv env,
+AssignProcedure.finishTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode) 
 
 
@@ -265,7 +265,7 @@
 
 
 protected void
-AssignProcedure.finishTransition(MasterProcedureEnv env,
+UnassignProcedure.finishTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode) 
 
 
@@ -316,7 +316,7 @@
 
 
 protected boolean
-UnassignProcedure.remoteCallFailed(MasterProcedureEnv env,
+AssignProcedure.remoteCallFailed(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in 
java.io">IOException exception) 
 
@@ -328,7 +328,7 @@
 
 
 protected boolean
-AssignProcedure.remoteCallFailed(MasterProcedureEnv env,
+UnassignProcedure.remoteCallFailed(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in 
java.io">IOException exception) 
 
@@ -353,10 +353,10 @@
 
 
 protected void
-UnassignProcedure.reportTransition(MasterProcedureEnv env,
+AssignProcedure.reportTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode code,
-long seqId) 
+long openSeqNum) 
 
 
 protected abstract void
@@ -367,10 +367,10 @@
 
 
 protected void
-AssignProcedure.reportTransition(MasterProcedureEnv env,
+UnassignProcedure.reportTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode code,
-long openSeqNum) 
+long seqId) 
 
 
 private boolean
@@ -381,7 +381,7 @@
 
 
 protected boolean
-UnassignProcedure.startTransition(MasterProcedureEnv env,
+AssignProcedure.startTransition(MasterProcedureEnv env,
RegionStates.RegionStateNode regionNode) 
 
 
@@ -391,7 +391,7 @@
 
 
 protected boolean
-AssignProcedure.startTransition(MasterProcedureEnv env,
+UnassignProcedure.startTransition(MasterProcedureEnv env,
RegionStates.RegionStateNode regionNode) 
 
 
@@ -404,7 +404,7 @@
 
 
 protected boolean
-UnassignProcedure.updateTransition(MasterProcedureEnv env,
+AssignProcedure.updateTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode) 
 
 
@@ -416,7 +416,7 @@
 
 
 protected boolean
-AssignProcedure.updateTransition(MasterProcedureEnv env,
+UnassignProcedure.updateTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode) 
 
 



[09/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 82c9df9..5544c71 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -146,14 +146,16 @@
 
 
 
-static HTableDescriptor
-HTableDescriptor.parseFrom(byte[] bytes)
+static HColumnDescriptor
+HColumnDescriptor.parseFrom(byte[] bytes)
 Deprecated. 
  
 
 
-static ClusterId
-ClusterId.parseFrom(byte[] bytes) 
+static HTableDescriptor
+HTableDescriptor.parseFrom(byte[] bytes)
+Deprecated. 
+ 
 
 
 static HRegionInfo
@@ -165,10 +167,8 @@
 
 
 
-static HColumnDescriptor
-HColumnDescriptor.parseFrom(byte[] bytes)
-Deprecated. 
- 
+static ClusterId
+ClusterId.parseFrom(byte[] bytes) 
 
 
 static SplitLogTask
@@ -222,17 +222,17 @@
 TableDescriptorBuilder.ModifyableTableDescriptor.parseFrom(byte[] bytes) 
 
 
-static RegionInfo
-RegionInfo.parseFrom(byte[] bytes) 
-
-
 static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.parseFrom(byte[] pbBytes) 
 
-
+
 private static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.parseFrom(byte[] bytes) 
 
+
+static RegionInfo
+RegionInfo.parseFrom(byte[] bytes) 
+
 
 static RegionInfo
 RegionInfo.parseFrom(byte[] bytes,
@@ -307,111 +307,111 @@
 ByteArrayComparable.parseFrom(byte[] pbBytes) 
 
 
-static SingleColumnValueExcludeFilter
-SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes) 
+static ColumnPrefixFilter
+ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
 
 
-static ValueFilter
-ValueFilter.parseFrom(byte[] pbBytes) 
+static ColumnCountGetFilter
+ColumnCountGetFilter.parseFrom(byte[] pbBytes) 
 
 
-static SkipFilter
-SkipFilter.parseFrom(byte[] pbBytes) 
+static RowFilter
+RowFilter.parseFrom(byte[] pbBytes) 
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[] pbBytes) 
+static FuzzyRowFilter
+FuzzyRowFilter.parseFrom(byte[] pbBytes) 
 
 
-static BinaryPrefixComparator
-BinaryPrefixComparator.parseFrom(byte[] pbBytes) 
+static BinaryComparator
+BinaryComparator.parseFrom(byte[] pbBytes) 
 
 
-static NullComparator
-NullComparator.parseFrom(byte[] pbBytes) 
+static RegexStringComparator
+RegexStringComparator.parseFrom(byte[] pbBytes) 
 
 
-static BigDecimalComparator
-BigDecimalComparator.parseFrom(byte[] pbBytes) 
+static Filter
+Filter.parseFrom(byte[] pbBytes)
+Concrete implementers can signal a failure condition in 
their code by throwing an
+ http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException.
+
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static RandomRowFilter
+RandomRowFilter.parseFrom(byte[] pbBytes) 
 
 
-static PageFilter
-PageFilter.parseFrom(byte[] pbBytes) 
+static FirstKeyOnlyFilter
+FirstKeyOnlyFilter.parseFrom(byte[] pbBytes) 
 
 
-static BitComparator
-BitComparator.parseFrom(byte[] pbBytes) 
+static SkipFilter
+SkipFilter.parseFrom(byte[] pbBytes) 
 
 
-static RowFilter
-RowFilter.parseFrom(byte[] pbBytes) 
+static BinaryPrefixComparator
+BinaryPrefixComparator.parseFrom(byte[] pbBytes) 
 
 
-static ColumnRangeFilter
-ColumnRangeFilter.parseFrom(byte[] pbBytes) 
+static TimestampsFilter
+TimestampsFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnCountGetFilter
-ColumnCountGetFilter.parseFrom(byte[] pbBytes) 
+static ValueFilter
+ValueFilter.parseFrom(byte[] pbBytes) 
 
 
-static SubstringComparator
-SubstringComparator.parseFrom(byte[] pbBytes) 
+static KeyOnlyFilter
+KeyOnlyFilter.parseFrom(byte[] pbBytes) 
 
 
-static MultipleColumnPrefixFilter
-MultipleColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static FamilyFilter
+FamilyFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnPaginationFilter
-ColumnPaginationFilter.parseFrom(byte[] pbBytes) 
+static QualifierFilter
+QualifierFilter.parseFrom(byte[] pbBytes) 
 
 
-static DependentColumnFilter
-DependentColumnFilter.parseFrom(byte[] pbBytes) 
+static FilterList
+FilterList.parseFrom(byte[] pbBytes) 
 
 
-static BinaryComparator
-BinaryComparator.parseFrom(byte[] pbBytes) 
+static BigDecimalComparator
+BigDecimalComparator.parseFrom(byte[] pbBytes) 
 
 
-static InclusiveStopFilter
-InclusiveStopFilter.parseFrom(byte[] pbBytes) 
+static ColumnRangeFilter
+ColumnRangeFilter.parseFrom(byte[] pbBytes) 
 
 
-static KeyOnlyFilter
-KeyOnlyFilter.parseFrom(byte[] pbBytes) 
+static ColumnPaginationFilter
+ColumnPaginationFilter.parseFrom(byte[] pbBytes) 
 
 
-static MultiRowRangeFilter
-MultiRowRangeFilter.parseFrom(byte[] pbBytes) 
+static SubstringComparator
+SubstringComparator.parseFrom(b

[11/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
index aadcd01..b57d788 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
@@ -148,8 +148,8 @@ the order they are declared.
 
 
 
-void
-Admin.snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
 TableName tableName,
 SnapshotType type)
 Create typed snapshot of the table.
@@ -157,18 +157,18 @@ the order they are declared.
 
 
 void
-HBaseAdmin.snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
-TableName tableName,
-SnapshotType type) 
-
-
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncAdmin.snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
+Admin.snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
 TableName tableName,
 SnapshotType type)
 Create typed snapshot of the table.
 
 
+
+void
+HBaseAdmin.snapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
+TableName tableName,
+SnapshotType type) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
index 94335f9..a1ef4d1 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -411,13 +411,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableDescriptor
-Table.getDescriptor()
-Gets the table 
descriptor for this table.
-
+HTable.getDescriptor() 
 
 
 TableDescriptor
-HTable.getDescriptor() 
+Table.getDescriptor()
+Gets the table 
descriptor for this table.
+
 
 
 TableDescriptor
@@ -467,18 +467,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncHBaseAdmin.getTableDescriptor(TableName tableName) 
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
 AsyncAdmin.getTableDescriptor(TableName tableName)
 Method for getting the tableDescriptor
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
 RawAsyncHBaseAdmin.getTableDescriptor(TableName tableName) 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
+AsyncHBaseAdmin.getTableDescriptor(TableName tableName) 
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureList>
 RawAsyn

hbase git commit: HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy

2017-11-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 79cc3b314 -> 31677c0aa


HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/31677c0a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/31677c0a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/31677c0a

Branch: refs/heads/branch-1.2
Commit: 31677c0aa8e6d76573eed291b7a37929b3f7484f
Parents: 79cc3b3
Author: zhangduo 
Authored: Mon Nov 13 22:03:48 2017 +0800
Committer: zhangduo 
Committed: Tue Nov 14 17:49:33 2017 +0800

--
 .../compactions/TestFIFOCompactionPolicy.java   | 166 +++
 1 file changed, 62 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/31677c0a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
index b887da9..d92ef32 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
@@ -18,20 +18,20 @@
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-import java.util.Collection;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
@@ -49,23 +48,24 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.TimeOffsetEnvironmentEdge;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
 @Category({ MediumTests.class })
 public class TestFIFOCompactionPolicy {
 
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
 
-
-  private final TableName tableName = 
TableName.valueOf(getClass().getSimpleName());
-
   private final byte[] family = Bytes.toBytes("f");
 
   private final byte[] qualifier = Bytes.toBytes("q");
 
+  @Rule
+  public ExpectedException error = ExpectedException.none();
+
   private Store getStoreWithName(TableName tableName) {
 MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
 List rsts = 
cluster.getRegionServerThreads();
@@ -78,16 +78,12 @@ public class TestFIFOCompactionPolicy {
 return null;
   }
 
-  private Store prepareData() throws IOException {
+  private Store prepareData(TableName tableName) throws IOException {
 HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
-if (admin.tableExists(tableName)) {
-  admin.disableTable(tableName);
-  admin.deleteTable(tableName);
-}
 HTableDescriptor desc = new HTableDescriptor(tableName);
-
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY, 
+
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY,
   FIFOCompactionPolicy.class.getName());
-desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, 
+desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName());
 HColumnDescriptor colDesc = new HColumnDescriptor(family);
 colDesc.setTimeToLive(1)

hbase git commit: HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy

2017-11-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 ade166d5f -> dace9d776


HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dace9d77
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dace9d77
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dace9d77

Branch: refs/heads/branch-1.4
Commit: dace9d7765f425adf4c3d4d7c6ab7e16da0464bb
Parents: ade166d
Author: zhangduo 
Authored: Mon Nov 13 22:03:48 2017 +0800
Committer: zhangduo 
Committed: Tue Nov 14 15:56:09 2017 +0800

--
 .../compactions/TestFIFOCompactionPolicy.java   | 166 +++
 1 file changed, 62 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dace9d77/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
index b887da9..d92ef32 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
@@ -18,20 +18,20 @@
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-import java.util.Collection;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
@@ -49,23 +48,24 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.TimeOffsetEnvironmentEdge;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
 @Category({ MediumTests.class })
 public class TestFIFOCompactionPolicy {
 
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
 
-
-  private final TableName tableName = 
TableName.valueOf(getClass().getSimpleName());
-
   private final byte[] family = Bytes.toBytes("f");
 
   private final byte[] qualifier = Bytes.toBytes("q");
 
+  @Rule
+  public ExpectedException error = ExpectedException.none();
+
   private Store getStoreWithName(TableName tableName) {
 MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
 List rsts = 
cluster.getRegionServerThreads();
@@ -78,16 +78,12 @@ public class TestFIFOCompactionPolicy {
 return null;
   }
 
-  private Store prepareData() throws IOException {
+  private Store prepareData(TableName tableName) throws IOException {
 HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
-if (admin.tableExists(tableName)) {
-  admin.disableTable(tableName);
-  admin.deleteTable(tableName);
-}
 HTableDescriptor desc = new HTableDescriptor(tableName);
-
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY, 
+
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY,
   FIFOCompactionPolicy.class.getName());
-desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, 
+desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName());
 HColumnDescriptor colDesc = new HColumnDescriptor(family);
 colDesc.setTimeToLive(1)

hbase git commit: HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy

2017-11-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 dea935dab -> 1fad75296


HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1fad7529
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1fad7529
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1fad7529

Branch: refs/heads/branch-1.3
Commit: 1fad75296704f5d1294a64b2f9f79f18663e09c5
Parents: dea935d
Author: zhangduo 
Authored: Mon Nov 13 22:03:48 2017 +0800
Committer: zhangduo 
Committed: Tue Nov 14 17:48:02 2017 +0800

--
 .../compactions/TestFIFOCompactionPolicy.java   | 166 +++
 1 file changed, 62 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1fad7529/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
index b887da9..d92ef32 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
@@ -18,20 +18,20 @@
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-import java.util.Collection;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
@@ -49,23 +48,24 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.TimeOffsetEnvironmentEdge;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
 @Category({ MediumTests.class })
 public class TestFIFOCompactionPolicy {
 
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
 
-
-  private final TableName tableName = 
TableName.valueOf(getClass().getSimpleName());
-
   private final byte[] family = Bytes.toBytes("f");
 
   private final byte[] qualifier = Bytes.toBytes("q");
 
+  @Rule
+  public ExpectedException error = ExpectedException.none();
+
   private Store getStoreWithName(TableName tableName) {
 MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
 List rsts = 
cluster.getRegionServerThreads();
@@ -78,16 +78,12 @@ public class TestFIFOCompactionPolicy {
 return null;
   }
 
-  private Store prepareData() throws IOException {
+  private Store prepareData(TableName tableName) throws IOException {
 HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
-if (admin.tableExists(tableName)) {
-  admin.disableTable(tableName);
-  admin.deleteTable(tableName);
-}
 HTableDescriptor desc = new HTableDescriptor(tableName);
-
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY, 
+
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY,
   FIFOCompactionPolicy.class.getName());
-desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, 
+desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName());
 HColumnDescriptor colDesc = new HColumnDescriptor(family);
 colDesc.setTimeToLive(1)

hbase git commit: HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy

2017-11-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1 bac254bd4 -> c0639d271


HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c0639d27
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c0639d27
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c0639d27

Branch: refs/heads/branch-1
Commit: c0639d271ae85d8bd447d938c09c07e67d65915b
Parents: bac254b
Author: zhangduo 
Authored: Mon Nov 13 22:03:48 2017 +0800
Committer: zhangduo 
Committed: Tue Nov 14 15:55:51 2017 +0800

--
 .../compactions/TestFIFOCompactionPolicy.java   | 166 +++
 1 file changed, 62 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c0639d27/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
index b887da9..d92ef32 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
@@ -18,20 +18,20 @@
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-import java.util.Collection;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
@@ -49,23 +48,24 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.TimeOffsetEnvironmentEdge;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
 @Category({ MediumTests.class })
 public class TestFIFOCompactionPolicy {
 
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
 
-
-  private final TableName tableName = 
TableName.valueOf(getClass().getSimpleName());
-
   private final byte[] family = Bytes.toBytes("f");
 
   private final byte[] qualifier = Bytes.toBytes("q");
 
+  @Rule
+  public ExpectedException error = ExpectedException.none();
+
   private Store getStoreWithName(TableName tableName) {
 MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
 List rsts = 
cluster.getRegionServerThreads();
@@ -78,16 +78,12 @@ public class TestFIFOCompactionPolicy {
 return null;
   }
 
-  private Store prepareData() throws IOException {
+  private Store prepareData(TableName tableName) throws IOException {
 HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
-if (admin.tableExists(tableName)) {
-  admin.disableTable(tableName);
-  admin.deleteTable(tableName);
-}
 HTableDescriptor desc = new HTableDescriptor(tableName);
-
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY, 
+
desc.setConfiguration(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY,
   FIFOCompactionPolicy.class.getName());
-desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, 
+desc.setConfiguration(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName());
 HColumnDescriptor colDesc = new HColumnDescriptor(family);
 colDesc.setTimeToLive(1); //

hbase git commit: HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy

2017-11-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 d5df4c042 -> ffb8c641d


HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ffb8c641
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ffb8c641
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ffb8c641

Branch: refs/heads/branch-2
Commit: ffb8c641d9acaa53cdfc66f36a6217f1f5526e11
Parents: d5df4c0
Author: zhangduo 
Authored: Mon Nov 13 22:03:48 2017 +0800
Committer: zhangduo 
Committed: Tue Nov 14 15:40:30 2017 +0800

--
 .../compactions/TestFIFOCompactionPolicy.java   | 136 +++
 1 file changed, 49 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ffb8c641/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
index 166fede..d06855b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
@@ -18,17 +18,18 @@
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Put;
@@ -40,7 +41,6 @@ import 
org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -49,30 +49,33 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.TimeOffsetEnvironmentEdge;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
 @Category({ RegionServerTests.class, MediumTests.class })
 public class TestFIFOCompactionPolicy {
 
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
 
-
   private final TableName tableName = 
TableName.valueOf(getClass().getSimpleName());
 
   private final byte[] family = Bytes.toBytes("f");
 
   private final byte[] qualifier = Bytes.toBytes("q");
 
+  @Rule
+  public ExpectedException error = ExpectedException.none();
+
   private HStore getStoreWithName(TableName tableName) {
 MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
 List rsts = 
cluster.getRegionServerThreads();
 for (int i = 0; i < cluster.getRegionServerThreads().size(); i++) {
   HRegionServer hrs = rsts.get(i).getRegionServer();
-  for (Region region : hrs.getRegions(tableName)) {
-return ((HRegion) region).getStores().iterator().next();
+  for (HRegion region : hrs.getRegions(tableName)) {
+return region.getStores().iterator().next();
   }
 }
 return null;
@@ -80,10 +83,6 @@ public class TestFIFOCompactionPolicy {
 
   private HStore prepareData() throws IOException {
 Admin admin = TEST_UTIL.getAdmin();
-if (admin.tableExists(tableName)) {
-  admin.disableTable(tableName);
-  admin.deleteTable(tableName);
-}
 TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
 .setValue(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY,
   FIFOCompactionPolicy.class.getName())
@@ -91,16 +90,14 @@ public class TestFIFOCompactionPolicy {
   DisabledRegionSplitPolicy.class.getName())
 

hbase git commit: HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy

2017-11-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 7406c83ef -> e0580b20d


HBASE-19243 Start mini cluster once before class for TestFIFOCompactionPolicy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e0580b20
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e0580b20
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e0580b20

Branch: refs/heads/master
Commit: e0580b20d14e47a7941f0ca5a37ee6bbbcf044c8
Parents: 7406c83
Author: zhangduo 
Authored: Mon Nov 13 22:03:48 2017 +0800
Committer: zhangduo 
Committed: Tue Nov 14 15:40:24 2017 +0800

--
 .../compactions/TestFIFOCompactionPolicy.java   | 136 +++
 1 file changed, 49 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e0580b20/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
index 166fede..d06855b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
@@ -18,17 +18,18 @@
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Put;
@@ -40,7 +41,6 @@ import 
org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HStore;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -49,30 +49,33 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.TimeOffsetEnvironmentEdge;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
 @Category({ RegionServerTests.class, MediumTests.class })
 public class TestFIFOCompactionPolicy {
 
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
 
-
   private final TableName tableName = 
TableName.valueOf(getClass().getSimpleName());
 
   private final byte[] family = Bytes.toBytes("f");
 
   private final byte[] qualifier = Bytes.toBytes("q");
 
+  @Rule
+  public ExpectedException error = ExpectedException.none();
+
   private HStore getStoreWithName(TableName tableName) {
 MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
 List rsts = 
cluster.getRegionServerThreads();
 for (int i = 0; i < cluster.getRegionServerThreads().size(); i++) {
   HRegionServer hrs = rsts.get(i).getRegionServer();
-  for (Region region : hrs.getRegions(tableName)) {
-return ((HRegion) region).getStores().iterator().next();
+  for (HRegion region : hrs.getRegions(tableName)) {
+return region.getStores().iterator().next();
   }
 }
 return null;
@@ -80,10 +83,6 @@ public class TestFIFOCompactionPolicy {
 
   private HStore prepareData() throws IOException {
 Admin admin = TEST_UTIL.getAdmin();
-if (admin.tableExists(tableName)) {
-  admin.disableTable(tableName);
-  admin.deleteTable(tableName);
-}
 TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
 .setValue(DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY,
   FIFOCompactionPolicy.class.getName())
@@ -91,16 +90,14 @@ public class TestFIFOCompactionPolicy {
   DisabledRegionSplitPolicy.class.getName())
 
.ad