[25/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 2812ba9..d3cba55 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -140,111 +140,111 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+ValueFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v) 
+SkipFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell v) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c) 
+FilterWrapper.filterKeyValue(Cell v) 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column qualifier and/or the column value.
-
+Filter.ReturnCode
+ColumnPrefixFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell v) 
+RowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+ColumnRangeFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell v) 
+MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v) 
+DependentColumnFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v) 
+InclusiveStopFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+MultiRowRangeFilter.filterKeyValue(Cell ignored) 
 
 
-Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cell v)
+A way to filter based on the column family, column qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell v) 
+FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored) 
+WhileMatchFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v) 
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c) 
+TimestampsFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
-Deprecated. 
- 
+FuzzyRowFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored) 
+FilterList.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v) 
+RandomRowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FilterWrapper.filterKeyValue(Cell v) 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
+SingleColumnValueFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c) 
+QualifierFilter.filterKeyValue(Cell v) 
 
 
 static Filter.ReturnCode
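
For orientation, the abstract Filter.filterKeyValue(Cell) hook that these class-use rows all point at is the per-cell decision method every concrete filter implements. A minimal sketch of a custom filter built on that hook follows; the class name and the qualifier-prefix check are illustrative, not taken from the diff above, and the serialization hooks (toByteArray/parseFrom) a deployable filter needs are omitted.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

// Minimal sketch: decide per cell whether it stays in the scan results.
public class QualifierPrefixOnlyFilter extends FilterBase {
  private final byte[] prefix;

  public QualifierPrefixOnlyFilter(byte[] prefix) {
    this.prefix = prefix;
  }

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    // Keep cells whose qualifier starts with the prefix, skip the rest.
    byte[] qualifier = CellUtil.cloneQualifier(cell);
    return Bytes.startsWith(qualifier, prefix) ? ReturnCode.INCLUDE : ReturnCode.SKIP;
  }
}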

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/71b53f08/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index abddd91..8f44ebe 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -433,13 +433,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 SkipFilter.filter 
 
 
-private Filter
-WhileMatchFilter.filter 
-
-
 (package private) Filter
 FilterWrapper.filter 
 
+
+private Filter
+WhileMatchFilter.filter 
+
 
 private Filter
 FilterList.seekHintFilter 
@@ -468,15 +468,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(ArrayList filterArguments) 
+SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.ht
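
The createFilterFromArguments(ArrayList) rows touched in the hunk above are the static factories the HBase filter-language parser calls with the raw, still-quoted argument bytes. A sketch of what such a factory looks like, modeled loosely on ColumnPrefixFilter; the wrapper class name is illustrative, not from the diff:

import java.util.ArrayList;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;

// Sketch of a createFilterFromArguments-style factory: validate the argument
// count, strip the surrounding quotes, and build the concrete filter.
public final class PrefixFilterArguments {
  public static Filter createFilterFromArguments(ArrayList<byte[]> filterArguments) {
    if (filterArguments.size() != 1) {
      throw new IllegalArgumentException("Expected 1 argument, got " + filterArguments.size());
    }
    byte[] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0));
    return new ColumnPrefixFilter(columnPrefix);
  }
}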

[25/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6509b6fa/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
index 8a19eda..2408858 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
@@ -244,7 +244,7 @@ the order they are declared.
 
 
 values
-public static Reference.Range[] values()
+public static Reference.Range[] values()
 Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -264,7 +264,7 @@ for (Reference.Range c : Reference.Range.values())
 
 
 valueOf
-public static Reference.Range valueOf(String name)
+public static Reference.Range valueOf(String name)
 Returns the enum constant of this type with the specified name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
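
The values()/valueOf() pair shown in this hunk is the standard generated enum API. A minimal sketch of the round-trip the javadoc describes; it deliberately does not assume which half-file constants Reference.Range declares:

import org.apache.hadoop.hbase.io.Reference;

// List every Reference.Range constant and parse it back from its name.
public class RangeRoundTrip {
  public static void main(String[] args) {
    for (Reference.Range c : Reference.Range.values()) {
      Reference.Range parsed = Reference.Range.valueOf(c.name());
      System.out.println(c + " parsed back as " + parsed);
    }
  }
}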

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6509b6fa/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index b6e7034..44310c8 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
+TableRecordReader.createKey() 
 
 
 ImmutableBytesWritable
@@ -183,11 +183,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -197,9 +195,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -218,12 +218,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -236,19 +234,21 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -281,12 +281,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
- 
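
The map(ImmutableBytesWritable, Result, OutputCollector, Reporter) rows being reordered in the hunks above belong to the old org.apache.hadoop.mapred table-mapper API. A minimal sketch of a mapper with that signature, modeled on the "Pass the key, value to reduce" behavior the IdentityTableMap row describes; the class name here is illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableMap;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

// Old-style (mapred) table mapper: emit each row key and Result unchanged.
public class PassThroughTableMap extends MapReduceBase
    implements TableMap<ImmutableBytesWritable, Result> {

  @Override
  public void map(ImmutableBytesWritable key, Result value,
      OutputCollector<ImmutableBytesWritable, Result> output,
      Reporter reporter) throws IOException {
    // Pass the key, value to reduce.
    output.collect(key, value);
  }
}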

[25/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7a34b01b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
index 2408858..8a19eda 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
@@ -244,7 +244,7 @@ the order they are declared.
 
 
 values
-public static Reference.Range[] values()
+public static Reference.Range[] values()
 Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -264,7 +264,7 @@ for (Reference.Range c : Reference.Range.values())
 
 
 valueOf
-public static Reference.Range valueOf(String name)
+public static Reference.Range valueOf(String name)
 Returns the enum constant of this type with the specified name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7a34b01b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 44310c8..b6e7034 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
+TableRecordReader.createKey() 
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
 
 
 ImmutableBytesWritable
@@ -183,9 +183,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -195,11 +197,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 
@@ -218,10 +218,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -234,21 +236,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -281,10 +281,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWrita

[25/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index d3cba55..2812ba9 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -140,111 +140,111 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell v) 
+ColumnPrefixFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v) 
+RowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FilterWrapper.filterKeyValue(Cell v) 
+FuzzyRowFilter.filterKeyValue(Cell c) 
 
 
-Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cell v)
+A way to filter based on the column family, column qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored) 
+RandomRowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell v) 
+FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+SkipFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v) 
+TimestampsFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
+ValueFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v) 
+QualifierFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+FilterList.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored) 
+ColumnRangeFilter.filterKeyValue(Cell kv) 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column qualifier and/or the column value.
-
+Filter.ReturnCode
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell v) 
+WhileMatchFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell v) 
+MultiRowRangeFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
-Deprecated. 
- 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v) 
+DependentColumnFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c) 
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c) 
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+InclusiveStopFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v) 
+FilterWrapper.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c) 
+MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v) 
+SingleColumnValueFilter.filterKeyValue(Cell c) 
 
 
 static Filter.ReturnCode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6d254372/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 8f44ebe..abddd91 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -433,13 +433,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 SkipFilter.filter 
 
 
-(package private) Filter
-FilterWrapper.filter 
-
-
 private Filter
 WhileMatchFilter.filter 
 
+
+(package private) Filter
+FilterWrapper.filter 
+
 
 private Filter
 FilterList.seekHintFilter 
@@ -468,15 +468,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 static Filter
-SingleColumnValueExcludeFilter.createFilterFromArguments(ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/Array

[25/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3a970c89/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
index 2408858..8a19eda 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
@@ -244,7 +244,7 @@ the order they are declared.
 
 
 values
-public static Reference.Range[] values()
+public static Reference.Range[] values()
 Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -264,7 +264,7 @@ for (Reference.Range c : Reference.Range.values())
 
 
 valueOf
-public static Reference.Range valueOf(String name)
+public static Reference.Range valueOf(String name)
 Returns the enum constant of this type with the specified name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3a970c89/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 44310c8..b6e7034 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
+TableRecordReader.createKey() 
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
 
 
 ImmutableBytesWritable
@@ -183,9 +183,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -195,11 +197,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 
@@ -218,10 +218,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -234,21 +236,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -281,10 +281,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWrita

[25/51] [partial] hbase-site git commit: Published site at 73e1bcd33515061be2dc2e51e6ad19d9798a8ef6.

2017-04-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cd27d06a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
index 8a19eda..2408858 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/Reference.Range.html
@@ -244,7 +244,7 @@ the order they are declared.
 
 
 values
-public static Reference.Range[] values()
+public static Reference.Range[] values()
 Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -264,7 +264,7 @@ for (Reference.Range c : Reference.Range.values())
 
 
 valueOf
-public static Reference.Range valueOf(String name)
+public static Reference.Range valueOf(String name)
 Returns the enum constant of this type with the specified name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cd27d06a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index b6e7034..44310c8 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey() 
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey() 
+TableRecordReader.createKey() 
 
 
 ImmutableBytesWritable
@@ -183,11 +183,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -197,9 +195,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -218,12 +218,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -236,19 +234,21 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
org.apache.hadoop.mapred.OutputCollector output,
-   org.apache.hadoop.mapred.Reporter reporter) 
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value) 
 
 
@@ -281,12 +281,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-