[52/52] hbase-site git commit: INFRA-10751 Empty commit
INFRA-10751 Empty commit Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/b6dc5735 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/b6dc5735 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/b6dc5735 Branch: refs/heads/asf-site Commit: b6dc573589f1e7ae199f59067cb48cadc193766d Parents: 6444c27 Author: Sean BusbeyAuthored: Tue Jul 5 18:52:31 2016 -0500 Committer: Sean Busbey Committed: Tue Jul 5 18:52:31 2016 -0500 -- --
[38/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/checkstyle.rss -- diff --git a/checkstyle.rss b/checkstyle.rss index bc6bf36..05e93d2 100644 --- a/checkstyle.rss +++ b/checkstyle.rss @@ -25,8 +25,8 @@ under the License. en-us 2007 - 2016 The Apache Software Foundation - File: 1782, - Errors: 11565, + File: 1784, + Errors: 11571, Warnings: 0, Infos: 0 @@ -130,7 +130,7 @@ under the License. - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.RowTooBigException.java;>org/apache/hadoop/hbase/regionserver/RowTooBigException.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.LogRoller.java;>org/apache/hadoop/hbase/regionserver/LogRoller.java 0 @@ -139,12 +139,12 @@ under the License. 0 - 0 + 5 - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.LogRoller.java;>org/apache/hadoop/hbase/regionserver/LogRoller.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.RowTooBigException.java;>org/apache/hadoop/hbase/regionserver/RowTooBigException.java 0 @@ -153,7 +153,7 @@ under the License. 0 - 5 + 0 @@ -956,20 +956,6 @@ under the License. - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.ConcurrentIndex.java;>org/apache/hadoop/hbase/util/ConcurrentIndex.java - - - 0 - - - 0 - - - 1 - - - - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mob.DefaultMobStoreCompactor.java;>org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java @@ -1124,7 +1110,7 @@ under the License. - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.IncompatibleFilterException.java;>org/apache/hadoop/hbase/filter/IncompatibleFilterException.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.MasterAddressTracker.java;>org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java 0 @@ -1133,12 +1119,12 @@ under the License. 
0 - 0 + 12 - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.MasterAddressTracker.java;>org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.IncompatibleFilterException.java;>org/apache/hadoop/hbase/filter/IncompatibleFilterException.java 0 @@ -1147,7 +1133,7 @@ under the License. 0 - 12 + 0 @@ -1348,7 +1334,7 @@ under the License. - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.http.ServerConfigurationKeys.java;>org/apache/hadoop/hbase/http/ServerConfigurationKeys.java + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.ValueFilter.java;>org/apache/hadoop/hbase/filter/ValueFilter.java 0 @@ -1357,12 +1343,12 @@ under the License. 0 - 1 + 6 - http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.ValueFilter.java;>org/apache/hadoop/hbase/filter/ValueFilter.java +
[23/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html index b8f8ff8..c3adb68 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html @@ -754,32 +754,40 @@ service. +protected TableName +AbstractRegionServerCallable.tableName + + private TableName BufferedMutatorParams.tableName + +protected TableName +RpcRetryingCallerWithReadReplicas.tableName + private TableName -TableState.tableName +AsyncProcess.AsyncRequestFutureImpl.tableName protected TableName -AbstractRegionServerCallable.tableName +RegionAdminServiceCallable.tableName private TableName -ClientScanner.tableName +HRegionLocator.tableName private TableName HTable.tableName -protected TableName -RpcRetryingCallerWithReadReplicas.tableName +private TableName +ClientScanner.tableName private TableName -HRegionLocator.tableName +TableState.tableName private TableName @@ -787,20 +795,12 @@ service. private TableName -AsyncProcess.AsyncRequestFutureImpl.tableName - - -private TableName ScannerCallableWithReplicas.tableName - + private TableName BufferedMutatorImpl.tableName - -protected TableName -RegionAdminServiceCallable.tableName - @@ -833,15 +833,13 @@ service. TableName -BufferedMutator.getName() -Gets the fully qualified table name instance of the table that this BufferedMutator writes to. +RegionLocator.getName() +Gets the fully qualified table name instance of this table. TableName -Table.getName() -Gets the fully qualified table name instance of this table. - +HRegionLocator.getName() TableName @@ -849,17 +847,19 @@ service. TableName -HRegionLocator.getName() +BufferedMutator.getName() +Gets the fully qualified table name instance of the table that this BufferedMutator writes to. 
+ TableName -RegionLocator.getName() -Gets the fully qualified table name instance of this table. - +BufferedMutatorImpl.getName() TableName -BufferedMutatorImpl.getName() +Table.getName() +Gets the fully qualified table name instance of this table. + TableName @@ -871,17 +871,17 @@ service. TableName -BufferedMutatorParams.getTableName() +AbstractRegionServerCallable.getTableName() TableName -TableState.getTableName() -Table name for state - +BufferedMutatorParams.getTableName() TableName -AbstractRegionServerCallable.getTableName() +TableState.getTableName() +Table name for state + protected TableName @@ -1000,15 +1000,15 @@ service. void -MetaCache.cacheLocation(TableNametableName, - RegionLocationslocations) +ConnectionImplementation.cacheLocation(TableNametableName, + RegionLocationslocation) Put a newly discovered HRegionLocation into the cache. void -ConnectionImplementation.cacheLocation(TableNametableName, - RegionLocationslocation) +MetaCache.cacheLocation(TableNametableName, + RegionLocationslocations) Put a newly discovered HRegionLocation into the cache. @@ -1018,16 +1018,16 @@ service. RegionLocationslocation) -void -MetaCache.cacheLocation(TableNametableName, +private void +ConnectionImplementation.cacheLocation(TableNametableName, ServerNamesource, HRegionLocationlocation) Put a newly discovered HRegionLocation into the cache. -private void -ConnectionImplementation.cacheLocation(TableNametableName, +void +MetaCache.cacheLocation(TableNametableName, ServerNamesource, HRegionLocationlocation) Put a newly discovered HRegionLocation into the cache. @@ -1358,15 +1358,15 @@ service. ConnectionImplementation.getBufferedMutator(TableNametableName) -RegionLocations -MetaCache.getCachedLocation(TableNametableName, +(package private) RegionLocations +ConnectionImplementation.getCachedLocation(TableNametableName, byte[]row) Search the cache for a location that fits our table and row key. 
-(package private) RegionLocations -ConnectionImplementation.getCachedLocation(TableNametableName, +RegionLocations +MetaCache.getCachedLocation(TableNametableName, byte[]row) Search the cache for a location that fits our table and row key. @@ -1429,15 +1429,15 @@ service. HBaseAdmin.getMobRegionInfo(TableNametableName) +(package private) int +ConnectionImplementation.getNumberOfCachedRegionLocations(TableNametableName) + + int MetaCache.getNumberOfCachedRegionLocations(TableNametableName) Return the number of cached region for a table. - -(package
[44/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html index 921faad..6040ac9 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html @@ -1513,813 +1513,817 @@ 1505} 1506return new FirstOnRowCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); 1507 } -1508 -1509 /** -1510 * Create a Cell that is smaller than all other possible Cells for the given Cell's row. -1511 * -1512 * @param cell -1513 * @return First possible Cell on passed Cell's row. -1514 */ -1515 public static Cell createFirstOnRowCol(final Cell cell) { -1516if (cell instanceof ByteBufferedCell) { -1517 return new FirstOnRowColByteBufferedCell(((ByteBufferedCell) cell).getRowByteBuffer(), -1518 ((ByteBufferedCell) cell).getRowPosition(), cell.getRowLength(), -1519 HConstants.EMPTY_BYTE_BUFFER, 0, (byte) 0, -1520 ((ByteBufferedCell) cell).getQualifierByteBuffer(), -1521 ((ByteBufferedCell) cell).getQualifierPosition(), cell.getQualifierLength()); -1522} -1523return new FirstOnRowColCell(cell.getRowArray(), cell.getRowOffset(), -1524cell.getRowLength(), HConstants.EMPTY_BYTE_ARRAY, 0, (byte)0, cell.getQualifierArray(), -1525cell.getQualifierOffset(), cell.getQualifierLength()); -1526 } -1527 /** -1528 * Create a Cell that is smaller than all other possible Cells for the given Cell row's next row. -1529 * Makes the next row's rowkey by appending single byte 0x00 to the end of current row key. 
-1530 */ -1531 public static Cell createFirstOnNextRow(final Cell cell) { -1532byte[] nextRow = new byte[cell.getRowLength() + 1]; -1533copyRowTo(cell, nextRow, 0); -1534nextRow[nextRow.length - 1] = 0;// maybe not necessary -1535return new FirstOnRowCell(nextRow, 0, (short) nextRow.length); -1536 } -1537 -1538 /** -1539 * Create a Cell that is smaller than all other possible Cells for the given Cell's rk:cf and -1540 * passed qualifier. -1541 * -1542 * @param cell -1543 * @param qArray -1544 * @param qoffest -1545 * @param qlength -1546 * @return Last possible Cell on passed Cell's rk:cf and passed qualifier. -1547 */ -1548 public static Cell createFirstOnRowCol(final Cell cell, byte[] qArray, int qoffest, int qlength) { -1549if(cell instanceof ByteBufferedCell) { -1550 return new FirstOnRowColByteBufferedCell(((ByteBufferedCell) cell).getRowByteBuffer(), -1551 ((ByteBufferedCell) cell).getRowPosition(), cell.getRowLength(), -1552 ((ByteBufferedCell) cell).getFamilyByteBuffer(), -1553 ((ByteBufferedCell) cell).getFamilyPosition(), cell.getFamilyLength(), -1554 ByteBuffer.wrap(qArray), qoffest, qlength); -1555} -1556return new FirstOnRowColCell(cell.getRowArray(), cell.getRowOffset(), -1557cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), -1558qArray, qoffest, qlength); -1559 } -1560 -1561 /** -1562 * Creates the first cell with the row/family/qualifier of this cell and the given timestamp. -1563 * Uses the "maximum" type that guarantees that the new cell is the lowest possible for this -1564 * combination of row, family, qualifier, and timestamp. This cell's own timestamp is ignored. 
-1565 * -1566 * @param cell - cell -1567 * @param ts -1568 */ -1569 public static Cell createFirstOnRowColTS(Cell cell, long ts) { -1570if(cell instanceof ByteBufferedCell) { -1571 return new FirstOnRowColTSByteBufferedCell(((ByteBufferedCell) cell).getRowByteBuffer(), -1572 ((ByteBufferedCell) cell).getRowPosition(), cell.getRowLength(), -1573 ((ByteBufferedCell) cell).getFamilyByteBuffer(), -1574 ((ByteBufferedCell) cell).getFamilyPosition(), cell.getFamilyLength(), -1575 ((ByteBufferedCell) cell).getQualifierByteBuffer(), -1576 ((ByteBufferedCell) cell).getQualifierPosition(), cell.getQualifierLength(), -1577 ts); -1578} -1579return new FirstOnRowColTSCell(cell.getRowArray(), cell.getRowOffset(), -1580cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), -1581cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(), ts); -1582 } -1583 -1584 /** -1585 * Create a Cell that is larger than all other possible Cells for the given Cell's row. -1586 * -1587 * @param cell -1588 * @return Last possible Cell on passed Cell's row. -1589 */ -1590 public static Cell createLastOnRow(final Cell cell) { -1591if (cell instanceof ByteBufferedCell) { -1592 return new
[09/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/class-use/TagCompressionContext.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/TagCompressionContext.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/TagCompressionContext.html index 5ef9d78..6981803 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/class-use/TagCompressionContext.html +++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/TagCompressionContext.html @@ -125,21 +125,21 @@ -private TagCompressionContext -HFileBlockDefaultEncodingContext.tagCompressionContext - - protected TagCompressionContext BufferedDataBlockEncoder.SeekerState.tagCompressionContext - + protected TagCompressionContext BufferedDataBlockEncoder.BufferedEncodedSeeker.tagCompressionContext - + private TagCompressionContext HFileBlockDefaultDecodingContext.tagCompressionContext + +private TagCompressionContext +HFileBlockDefaultEncodingContext.tagCompressionContext + @@ -151,11 +151,11 @@ TagCompressionContext -HFileBlockDefaultEncodingContext.getTagCompressionContext() +HFileBlockDefaultDecodingContext.getTagCompressionContext() TagCompressionContext -HFileBlockDefaultDecodingContext.getTagCompressionContext() +HFileBlockDefaultEncodingContext.getTagCompressionContext() @@ -168,11 +168,11 @@ void -HFileBlockDefaultEncodingContext.setTagCompressionContext(TagCompressionContexttagCompressionContext) +HFileBlockDefaultDecodingContext.setTagCompressionContext(TagCompressionContexttagCompressionContext) void -HFileBlockDefaultDecodingContext.setTagCompressionContext(TagCompressionContexttagCompressionContext) +HFileBlockDefaultEncodingContext.setTagCompressionContext(TagCompressionContexttagCompressionContext) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html index 848b434..ca660bd 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html +++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html @@ -106,15 +106,15 @@ private TimeRange -Get.tr +Scan.tr private TimeRange -Increment.tr +Get.tr private TimeRange -Scan.tr +Increment.tr @@ -140,20 +140,20 @@ TimeRange +Scan.getTimeRange() + + +TimeRange Get.getTimeRange() Method for retrieving the get's TimeRange - + TimeRange Increment.getTimeRange() Gets the TimeRange used for this increment. - -TimeRange -Scan.getTimeRange() - @@ -202,16 +202,16 @@ +protected TimeRange +StoreFileReader.timeRange + + private TimeRange ImmutableSegment.timeRange This is an immutable segment so use the read-only TimeRange rather than the heavy-weight TimeRangeTracker with all its synchronization when doing time range stuff. - -protected TimeRange -StoreFileReader.timeRange - private TimeRange ScanQueryMatcher.tr http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html b/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html index a60692e..045614b 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html +++ b/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html @@ -328,11 +328,11 @@ the order they are declared. Compression.Algorithm -HFileReaderImpl.getCompressionAlgorithm() +HFile.Reader.getCompressionAlgorithm() Compression.Algorithm -HFile.Reader.getCompressionAlgorithm() +HFileReaderImpl.getCompressionAlgorithm() Compression.Algorithm @@ -512,36 +512,36 @@ the order they are declared. 
StoreFileWriter -Store.createWriterInTmp(longmaxKeyCount, +HStore.createWriterInTmp(longmaxKeyCount, Compression.Algorithmcompression, booleanisCompaction, booleanincludeMVCCReadpoint, - booleanincludesTags) + booleanincludesTag) StoreFileWriter -HStore.createWriterInTmp(longmaxKeyCount, +Store.createWriterInTmp(longmaxKeyCount, Compression.Algorithmcompression, booleanisCompaction, booleanincludeMVCCReadpoint, - booleanincludesTag) + booleanincludesTags)
[39/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/checkstyle-aggregate.html -- diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html index 1233993..798c91c 100644 --- a/checkstyle-aggregate.html +++ b/checkstyle-aggregate.html @@ -7,7 +7,7 @@ - + Apache HBase Checkstyle Results @@ -280,10 +280,10 @@ Warnings Errors -1782 +1784 0 0 -11565 +11571 Files @@ -711,7 +711,7 @@ org/apache/hadoop/hbase/client/HBaseAdmin.java 0 0 -54 +39 org/apache/hadoop/hbase/client/HRegionLocator.java 0 @@ -2331,7 +2331,7 @@ org/apache/hadoop/hbase/mapreduce/RowCounter.java 0 0 -3 +6 org/apache/hadoop/hbase/mapreduce/SampleUploader.java 0 @@ -2356,7 +2356,7 @@ org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java 0 0 -14 +15 org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java 0 @@ -2511,7 +2511,7 @@ org/apache/hadoop/hbase/master/HMaster.java 0 0 -47 +49 org/apache/hadoop/hbase/master/HMasterCommandLine.java 0 @@ -3171,7 +3171,7 @@ org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java 0 0 -4 +3 org/apache/hadoop/hbase/procedure2/util/StringUtils.java 0 @@ -4206,7 +4206,7 @@ org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java 0 0 -5 +6 org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java 0 @@ -5193,135 +5193,135 @@ 0 4 -org/apache/hadoop/hbase/util/AbstractHBaseTool.java +org/apache/hadoop/hbase/util/AbstractFileStatusFilter.java 0 0 2 +org/apache/hadoop/hbase/util/AbstractHBaseTool.java +0 +0 +2 + org/apache/hadoop/hbase/util/AbstractPositionedByteRange.java 0 0 3 - + org/apache/hadoop/hbase/util/Addressing.java 0 0 5 - + org/apache/hadoop/hbase/util/ArrayUtils.java 0 0 1 - + org/apache/hadoop/hbase/util/AtomicUtils.java 0 0 1 - + org/apache/hadoop/hbase/util/Base64.java 0 0 28 - + org/apache/hadoop/hbase/util/BloomFilterChunk.java 0 0 1 - + org/apache/hadoop/hbase/util/BloomFilterFactory.java 0 0 8 - + org/apache/hadoop/hbase/util/BloomFilterUtil.java 0 0 14 - + 
org/apache/hadoop/hbase/util/BoundedArrayQueue.java 0 0 3 - + org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java 0 0 11 - + org/apache/hadoop/hbase/util/ByteBufferArray.java 0 0 6 - + org/apache/hadoop/hbase/util/ByteBufferUtils.java 0 0 54 - + org/apache/hadoop/hbase/util/ByteRangeUtils.java 0 0 2 - + org/apache/hadoop/hbase/util/ByteStringer.java 0 0 2 - + org/apache/hadoop/hbase/util/Bytes.java 0 0 97 - + org/apache/hadoop/hbase/util/ChecksumFactory.java 0 0 1 - + org/apache/hadoop/hbase/util/ChecksumType.java 0 0 2 - + org/apache/hadoop/hbase/util/ClassLoaderBase.java 0 0 1 - + org/apache/hadoop/hbase/util/ClassSize.java 0 0 13 - + org/apache/hadoop/hbase/util/Classes.java 0 0 1 - + org/apache/hadoop/hbase/util/CollectionBackedScanner.java 0 0 1 - + org/apache/hadoop/hbase/util/CollectionUtils.java 0 0 2 - + org/apache/hadoop/hbase/util/CompressionTest.java 0 0 4 - + org/apache/hadoop/hbase/util/ConcatenatedLists.java 0 0 3 - -org/apache/hadoop/hbase/util/ConcurrentIndex.java -0 -0 -1 org/apache/hadoop/hbase/util/ConfigurationUtil.java 0 @@ -5396,12 +5396,12 @@ org/apache/hadoop/hbase/util/FSUtils.java 0 0 -80 +89 org/apache/hadoop/hbase/util/FSVisitor.java 0 0 -1 +3 org/apache/hadoop/hbase/util/ForeignExceptionUtil.java 0 @@ -5421,7 +5421,7 @@ org/apache/hadoop/hbase/util/HBaseFsck.java 0 0 -86 +88 org/apache/hadoop/hbase/util/HBaseFsckRepair.java 0 @@ -5716,7 +5716,7 @@ org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java 0 0 -9 +10 org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java 0 @@ -5973,12 +5973,12 @@ http://checkstyle.sourceforge.net/config_blocks.html#NeedBraces;>NeedBraces -1692 +1693 Error coding http://checkstyle.sourceforge.net/config_coding.html#EmptyStatement;>EmptyStatement -7 +8 Error @@ -6035,7 +6035,7 @@ ordered: true sortStaticImportsAlphabetically: true option: top -839 +841 Error @@ -6047,7 +6047,7 @@ http://checkstyle.sourceforge.net/config_imports.html#UnusedImports;>UnusedImports processJavadoc: true -43 +45 
Error indentation @@ -6058,7 +6058,7 @@ arrayInitIndent: 2 throwsIndent: 2 basicOffset: 2 -3760 +3744 Error javadoc @@ -6070,7 +6070,7 @@ http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription;>NonEmptyAtclauseDescription -3217 +3222 Error misc @@ -6088,7 +6088,7 @@ max: 100 ignorePattern: ^package.*|^import.*|a href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated -331 +342 Error @@ -6939,49 +6939,49 @@ javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. -1512 +1516 Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. -1542 +1546 Error
[34/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowByteBufferedCell.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowByteBufferedCell.html b/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowByteBufferedCell.html index c14a880..b7ce12d 100644 --- a/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowByteBufferedCell.html +++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowByteBufferedCell.html @@ -118,7 +118,7 @@ @InterfaceAudience.Private -private static class CellUtil.LastOnRowByteBufferedCell +private static class CellUtil.LastOnRowByteBufferedCell extends CellUtil.EmptyByteBufferedCell @@ -239,7 +239,7 @@ extends rowBuff -private finalhttp://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer rowBuff +private finalhttp://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer rowBuff @@ -248,7 +248,7 @@ extends roffset -private finalint roffset +private finalint roffset @@ -257,7 +257,7 @@ extends rlength -private finalshort rlength +private finalshort rlength @@ -274,7 +274,7 @@ extends CellUtil.LastOnRowByteBufferedCell -publicCellUtil.LastOnRowByteBufferedCell(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBufferrow, +publicCellUtil.LastOnRowByteBufferedCell(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBufferrow, introffset, shortrlength) @@ -293,7 +293,7 @@ extends getRowByteBuffer -publichttp://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffergetRowByteBuffer() +publichttp://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in 
java.nio">ByteBuffergetRowByteBuffer() Overrides: getRowByteBufferin classCellUtil.EmptyByteBufferedCell @@ -306,7 +306,7 @@ extends getRowPosition -publicintgetRowPosition() +publicintgetRowPosition() Overrides: getRowPositionin classCellUtil.EmptyByteBufferedCell @@ -319,7 +319,7 @@ extends getRowLength -publicshortgetRowLength() +publicshortgetRowLength() Specified by: getRowLengthin interfaceCell @@ -334,7 +334,7 @@ extends getTimestamp -publiclonggetTimestamp() +publiclonggetTimestamp() Returns:Long value representing time at which this cell was "Put" into the row. Typically represents the time of insertion, but can be any value from 0 to Long.MAX_VALUE. @@ -345,7 +345,7 @@ extends getTypeByte -publicbytegetTypeByte() +publicbytegetTypeByte() Returns:The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowCell.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowCell.html b/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowCell.html index ba2c796..c31a4c0 100644 --- a/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowCell.html +++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.LastOnRowCell.html @@ -113,7 +113,7 @@ @InterfaceAudience.Private -private static class CellUtil.LastOnRowCell +private static class CellUtil.LastOnRowCell extends CellUtil.EmptyCell @@ -236,7 +236,7 @@ extends rowArray -private finalbyte[] rowArray +private finalbyte[] rowArray @@ -245,7 +245,7 @@ extends roffset -private finalint roffset +private finalint roffset @@ -254,7 +254,7 @@ extends rlength -private finalshort rlength +private finalshort rlength @@ -271,7 +271,7 @@ extends CellUtil.LastOnRowCell -publicCellUtil.LastOnRowCell(byte[]row, +publicCellUtil.LastOnRowCell(byte[]row, introffset, shortrlength) @@ -290,7 +290,7 @@ extends getRowArray -publicbyte[]getRowArray() +publicbyte[]getRowArray() Description 
copied from interface:Cell Contiguous raw bytes that may start at any index in the containing array. Max length is Short.MAX_VALUE which is 32,767 bytes. @@ -308,7 +308,7 @@ extends getRowOffset -publicintgetRowOffset() +publicintgetRowOffset() Specified by: getRowOffsetin interfaceCell @@ -323,7 +323,7 @@ extends getRowLength -publicshortgetRowLength() +publicshortgetRowLength() Specified by: getRowLengthin interfaceCell @@ -338,7 +338,7 @@ extends getTimestamp -publiclonggetTimestamp() +publiclonggetTimestamp() Returns:Long value representing time at which
[30/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html index 04cacd4..cecaaab 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html @@ -561,22 +561,22 @@ service. void -BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). - Use BaseMasterObserver.postAddColumnFamily(ObserverContext, TableName, HColumnDescriptor). - +Deprecated. void -BaseMasterAndRegionObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. +Deprecated. +As of release 2.0.0, this will be removed in HBase 3.0.0 + (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). + Use BaseMasterObserver.postAddColumnFamily(ObserverContext, TableName, HColumnDescriptor). + @@ -593,13 +593,13 @@ service. void -BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) void -BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) @@ -613,22 +613,22 @@ service. 
void -BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use - BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext, TableName, HColumnDescriptor). - +Deprecated. void -BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. +Deprecated. +As of release 2.0.0, this will be removed in HBase 3.0.0 + (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use + BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext, TableName, HColumnDescriptor). + @@ -645,13 +645,13 @@ service. void -BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) void -BaseMasterAndRegionObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) @@ -665,13 +665,13 @@ service. void -BaseMasterObserver.postCompletedModifyColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCompletedModifyColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) void
[42/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html index e36ae5f..7003255 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html @@ -283,384 +283,385 @@ 275if (!includeRegionInSplit(keys.getFirst()[i], keys.getSecond()[i])) { 276 continue; 277} -278HRegionLocation location = getRegionLocator().getRegionLocation(keys.getFirst()[i], false); -279// The below InetSocketAddress creation does a name resolution. -280InetSocketAddress isa = new InetSocketAddress(location.getHostname(), location.getPort()); -281if (isa.isUnresolved()) { -282 LOG.warn("Failed resolve " + isa); -283} -284InetAddress regionAddress = isa.getAddress(); -285String regionLocation; -286regionLocation = reverseDNS(regionAddress); -287 -288byte[] startRow = scan.getStartRow(); -289byte[] stopRow = scan.getStopRow(); -290// determine if the given start an stop key fall into the region -291if ((startRow.length == 0 || keys.getSecond()[i].length == 0 || -292Bytes.compareTo(startRow, keys.getSecond()[i]) 0) -293(stopRow.length == 0 || -294 Bytes.compareTo(stopRow, keys.getFirst()[i]) 0)) { -295 byte[] splitStart = startRow.length == 0 || -296 Bytes.compareTo(keys.getFirst()[i], startRow) = 0 ? -297 keys.getFirst()[i] : startRow; -298 byte[] splitStop = (stopRow.length == 0 || -299 Bytes.compareTo(keys.getSecond()[i], stopRow) = 0) -300keys.getSecond()[i].length 0 ? 
-301 keys.getSecond()[i] : stopRow; -302 -303 byte[] regionName = location.getRegionInfo().getRegionName(); -304 String encodedRegionName = location.getRegionInfo().getEncodedName(); -305 long regionSize = sizeCalculator.getRegionSize(regionName); -306 TableSplit split = new TableSplit(tableName, scan, -307splitStart, splitStop, regionLocation, encodedRegionName, regionSize); -308 splits.add(split); -309 if (LOG.isDebugEnabled()) { -310LOG.debug("getSplits: split - " + i + " - " + split); -311 } -312} -313 } -314 //The default value of "hbase.mapreduce.input.autobalance" is false, which means not enabled. -315 boolean enableAutoBalance = context.getConfiguration() -316 .getBoolean(MAPREDUCE_INPUT_AUTOBALANCE, false); -317 if (enableAutoBalance) { -318long totalRegionSize=0; -319for (int i = 0; i splits.size(); i++){ -320 TableSplit ts = (TableSplit)splits.get(i); -321 totalRegionSize += ts.getLength(); -322} -323long averageRegionSize = totalRegionSize / splits.size(); -324// the averageRegionSize must be positive. -325if (averageRegionSize = 0) { -326LOG.warn("The averageRegionSize is not positive: "+ averageRegionSize + ", " + -327"set it to 1."); -328averageRegionSize = 1; -329} -330return calculateRebalancedSplits(splits, context, averageRegionSize); -331 } else { -332return splits; -333 } -334} finally { -335 if (closeOnFinish) { -336closeTable(); -337 } -338} -339 } -340 -341 String reverseDNS(InetAddress ipAddress) throws UnknownHostException { -342String hostName = this.reverseDNSCacheMap.get(ipAddress); -343if (hostName == null) { -344 String ipAddressString = null; -345 try { -346ipAddressString = DNS.reverseDns(ipAddress, null); -347 } catch (Exception e) { -348// We can use InetAddress in case the jndi failed to pull up the reverse DNS entry from the -349// name service. Also, in case of ipv6, we need to use the InetAddress since resolving -350// reverse DNS using jndi doesn't work well with ipv6 addresses. 
-351ipAddressString = InetAddress.getByName(ipAddress.getHostAddress()).getHostName(); -352 } -353 if (ipAddressString == null) throw new UnknownHostException("No host found for " + ipAddress); -354 hostName = Strings.domainNamePointerToHostName(ipAddressString); -355 this.reverseDNSCacheMap.put(ipAddress, hostName); -356} -357return hostName; -358 } -359 -360 /** -361 * Calculates the number of MapReduce input splits for the map tasks. The number of -362 * MapReduce input splits depends on the average region size and the "data skew ratio"
[20/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html index f09f999..bef66b9 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html +++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html @@ -3831,7 +3831,7 @@ publicvoid getTableRegions -publichttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfogetTableRegions(TableNametableName) +publichttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfogetTableRegions(TableNametableName) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Description copied from interface:Admin Get the regions of a given table. 
@@ -3850,7 +3850,7 @@ publicvoid close -publicvoidclose() +publicvoidclose() throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Specified by: @@ -3869,7 +3869,7 @@ publicvoid getTableDescriptorsByTableName -publicHTableDescriptor[]getTableDescriptorsByTableName(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTableNametableNames) +publicHTableDescriptor[]getTableDescriptorsByTableName(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTableNametableNames) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Description copied from interface:Admin Get tableDescriptors @@ -3888,7 +3888,7 @@ publicvoid getTableDescriptorByTableName -privateHTableDescriptorgetTableDescriptorByTableName(TableNametableName) +privateHTableDescriptorgetTableDescriptorByTableName(TableNametableName) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Get tableDescriptor Parameters:tableName - one table name @@ -3903,7 +3903,7 @@ publicvoid getTableDescriptors -publicHTableDescriptor[]getTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringnames) +publicHTableDescriptor[]getTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringnames) throws 
http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Description copied from interface:Admin Get tableDescriptors @@ -3922,7 +3922,7 @@ publicvoid rollWALWriterImpl -privateorg.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponserollWALWriterImpl(ServerNamesn) +privateorg.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponserollWALWriterImpl(ServerNamesn) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException, FailedLogCloseException Throws: @@ -3937,7 +3937,7 @@ publicvoid rollHLogWriter http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publicbyte[][]rollHLogWriter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringserverName) +publicbyte[][]rollHLogWriter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringserverName) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException, FailedLogCloseException Deprecated.use rollWALWriter(ServerName) @@ -3967,7 +3967,7 @@ publicbyte[][]
[12/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html index 3ddba24..43287e3 100644 --- a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html +++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html @@ -152,7 +152,7 @@ private RegionCoprocessorEnvironment -BaseRowProcessorEndpoint.env +AggregateImplementation.env private RegionCoprocessorEnvironment @@ -160,7 +160,7 @@ private RegionCoprocessorEnvironment -AggregateImplementation.env +BaseRowProcessorEndpoint.env @@ -1473,11 +1473,11 @@ private RegionCoprocessorEnvironment -BulkDeleteEndpoint.env +RowCountEndpoint.env private RegionCoprocessorEnvironment -RowCountEndpoint.env +BulkDeleteEndpoint.env @@ -1948,13 +1948,13 @@ void -VisibilityLabelService.init(RegionCoprocessorEnvironmente) -System calls this after opening of regions. - +DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironmente) void -DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironmente) +VisibilityLabelService.init(RegionCoprocessorEnvironmente) +System calls this after opening of regions. 
+ private void http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html index f76650a..a6609dd 100644 --- a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html +++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html @@ -110,24 +110,17 @@ ReplicationEndpoint -BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContextRegionServerCoprocessorEnvironmentctx, - ReplicationEndpointendpoint) - - -ReplicationEndpoint RegionServerObserver.postCreateReplicationEndPoint(ObserverContextRegionServerCoprocessorEnvironmentctx, ReplicationEndpointendpoint) This will be called after the replication endpoint is instantiated. - -void -BaseRegionServerObserver.postMerge(ObserverContextRegionServerCoprocessorEnvironmentc, - RegionregionA, - RegionregionB, - RegionmergedRegion) - +ReplicationEndpoint +BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContextRegionServerCoprocessorEnvironmentctx, + ReplicationEndpointendpoint) + + void RegionServerObserver.postMerge(ObserverContextRegionServerCoprocessorEnvironmentc, RegionregionA, @@ -136,14 +129,14 @@ called after the regions merge. 
- + void -BaseRegionServerObserver.postMergeCommit(ObserverContextRegionServerCoprocessorEnvironmentctx, - RegionregionA, - RegionregionB, - RegionmergedRegion) +BaseRegionServerObserver.postMerge(ObserverContextRegionServerCoprocessorEnvironmentc, + RegionregionA, + RegionregionB, + RegionmergedRegion) - + void RegionServerObserver.postMergeCommit(ObserverContextRegionServerCoprocessorEnvironmentctx, RegionregionA, @@ -152,13 +145,14 @@ This will be called after PONR step as part of regions merge transaction. - + void -BaseRegionServerObserver.postReplicateLogEntries(ObserverContextRegionServerCoprocessorEnvironmentctx, - http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryentries, - CellScannercells) +BaseRegionServerObserver.postMergeCommit(ObserverContextRegionServerCoprocessorEnvironmentctx, + RegionregionA, + RegionregionB, + RegionmergedRegion) - + void RegionServerObserver.postReplicateLogEntries(ObserverContextRegionServerCoprocessorEnvironmentctx, http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
[04/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html index 1cbe6c4..5f33b9d 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html @@ -162,12 +162,12 @@ HFileBlock -HFileReaderImpl.getMetaBlock(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringmetaBlockName, +HFile.Reader.getMetaBlock(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringmetaBlockName, booleancacheBlock) HFileBlock -HFile.Reader.getMetaBlock(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringmetaBlockName, +HFileReaderImpl.getMetaBlock(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringmetaBlockName, booleancacheBlock) @@ -185,27 +185,27 @@ HFileBlock -HFileReaderImpl.readBlock(longdataBlockOffset, +HFile.CachingBlockReader.readBlock(longoffset, longonDiskBlockSize, booleancacheBlock, booleanpread, booleanisCompaction, booleanupdateCacheMetrics, BlockTypeexpectedBlockType, - DataBlockEncodingexpectedDataBlockEncoding) + DataBlockEncodingexpectedDataBlockEncoding) +Read in a file block. + HFileBlock -HFile.CachingBlockReader.readBlock(longoffset, +HFileReaderImpl.readBlock(longdataBlockOffset, longonDiskBlockSize, booleancacheBlock, booleanpread, booleanisCompaction, booleanupdateCacheMetrics, BlockTypeexpectedBlockType, - DataBlockEncodingexpectedDataBlockEncoding) -Read in a file block. 
- + DataBlockEncodingexpectedDataBlockEncoding) HFileBlock @@ -346,14 +346,14 @@ void -HFileReaderImpl.returnBlock(HFileBlockblock) - - -void HFile.CachingBlockReader.returnBlock(HFileBlockblock) Return the given block back to the cache, if it was obtained from cache. + +void +HFileReaderImpl.returnBlock(HFileBlockblock) + private void HFileReaderImpl.HFileScannerImpl.returnBlockToCache(HFileBlockblock) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html index 7fe5bdb..19962b5 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlockIndex.BlockIndexReader.html @@ -136,11 +136,11 @@ HFileBlockIndex.BlockIndexReader -HFileReaderImpl.getDataBlockIndexReader() +HFile.Reader.getDataBlockIndexReader() HFileBlockIndex.BlockIndexReader -HFile.Reader.getDataBlockIndexReader() +HFileReaderImpl.getDataBlockIndexReader() http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html index 542e8f5..bc9a36d 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html @@ -135,11 +135,11 @@ private HFileContext -HFileBlockDefaultEncodingContext.fileContext +HFileBlockDefaultDecodingContext.fileContext private HFileContext -HFileBlockDefaultDecodingContext.fileContext +HFileBlockDefaultEncodingContext.fileContext private HFileContext 
@@ -156,19 +156,19 @@ HFileContext -HFileBlockDefaultEncodingContext.getHFileContext() +HFileBlockEncodingContext.getHFileContext() HFileContext -HFileBlockDecodingContext.getHFileContext() +HFileBlockDefaultDecodingContext.getHFileContext() HFileContext -HFileBlockDefaultDecodingContext.getHFileContext() +HFileBlockDecodingContext.getHFileContext() HFileContext -HFileBlockEncodingContext.getHFileContext()
[29/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html index 91234f9..03e37db 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html @@ -1113,12 +1113,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. void -BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegionInfo) void -BaseMasterAndRegionObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegionInfo) @@ -1130,13 +1130,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. void -BaseMasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) void -BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) @@ -1150,13 +1150,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
void -BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) void -BaseMasterAndRegionObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) @@ -1170,22 +1170,22 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. void -BaseMasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575). - Use BaseMasterObserver.postCompletedCreateTableAction(ObserverContext, HTableDescriptor, HRegionInfo[]) - +Deprecated. void -BaseMasterAndRegionObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) -Deprecated. +Deprecated. +As of release 2.0.0, this will be removed in HBase 3.0.0 + (https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575). + Use BaseMasterObserver.postCompletedCreateTableAction(ObserverContext, HTableDescriptor, HRegionInfo[]) + @@ -1202,13 +1202,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
void -BaseMasterObserver.postDispatchMerge(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postDispatchMerge(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegionA, HRegionInforegionB) void -BaseMasterAndRegionObserver.postDispatchMerge(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postDispatchMerge(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegionA, HRegionInforegionB) @@ -1222,14 +1222,14 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. void -BaseMasterObserver.postMove(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postMove(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegion, ServerNamesrcServer, ServerNamedestServer)
[13/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html index 90aa87b..6b89aff 100644 --- a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html +++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html @@ -185,11 +185,11 @@ void -BaseMasterObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) +BaseMasterAndRegionObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) void -BaseMasterAndRegionObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) +BaseMasterObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) void @@ -199,22 +199,22 @@ void -BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). - Use BaseMasterObserver.postAddColumnFamily(ObserverContext, TableName, HColumnDescriptor). - +Deprecated. void -BaseMasterAndRegionObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. +Deprecated. +As of release 2.0.0, this will be removed in HBase 3.0.0 + (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). + Use BaseMasterObserver.postAddColumnFamily(ObserverContext, TableName, HColumnDescriptor). 
+ @@ -231,13 +231,13 @@ void -BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) void -BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) @@ -251,22 +251,22 @@ void -BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use - BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext, TableName, HColumnDescriptor). - +Deprecated. void -BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. +Deprecated. +As of release 2.0.0, this will be removed in HBase 3.0.0 + (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use + BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext, TableName, HColumnDescriptor). 
+ @@ -283,12 +283,12 @@ void -BaseMasterObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) void -BaseMasterAndRegionObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) @@ -314,12 +314,12 @@ void -BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
[15/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/class-use/PrefixTreeBlockMeta.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/class-use/PrefixTreeBlockMeta.html b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/class-use/PrefixTreeBlockMeta.html index 6030bdd..0a6132e 100644 --- a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/class-use/PrefixTreeBlockMeta.html +++ b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/class-use/PrefixTreeBlockMeta.html @@ -205,13 +205,13 @@ protected PrefixTreeBlockMeta -ColumnNodeReader.blockMeta +ColumnReader.blockMeta fields protected PrefixTreeBlockMeta -ColumnReader.blockMeta +ColumnNodeReader.blockMeta fields @@ -226,12 +226,12 @@ void -ColumnNodeReader.initOnBlock(PrefixTreeBlockMetablockMeta, +ColumnReader.initOnBlock(PrefixTreeBlockMetablockMeta, ByteBuffblock) void -ColumnReader.initOnBlock(PrefixTreeBlockMetablockMeta, +ColumnNodeReader.initOnBlock(PrefixTreeBlockMetablockMeta, ByteBuffblock) @@ -390,15 +390,15 @@ +protected PrefixTreeBlockMeta +ColumnNodeWriter.blockMeta + + private PrefixTreeBlockMeta ColumnSectionWriter.blockMeta fields - -protected PrefixTreeBlockMeta -ColumnNodeWriter.blockMeta - @@ -450,11 +450,11 @@ protected PrefixTreeBlockMeta -RowSectionWriter.blockMeta +RowNodeWriter.blockMeta protected PrefixTreeBlockMeta -RowNodeWriter.blockMeta +RowSectionWriter.blockMeta http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html index 1fb2511..6ed6dd7 100644 --- a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html +++ 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html @@ -135,17 +135,17 @@ PrefixTreeEncoder -EncoderPool.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamoutputStream, +EncoderPoolImpl.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamoutputStream, booleanincludeMvccVersion) -PrefixTreeEncoder -EncoderPoolImpl.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamoutputStream, +static PrefixTreeEncoder +EncoderFactory.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamoutputStream, booleanincludeMvccVersion) -static PrefixTreeEncoder -EncoderFactory.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamoutputStream, +PrefixTreeEncoder +EncoderPool.checkOut(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamoutputStream, booleanincludeMvccVersion) @@ -167,16 +167,16 @@ void -EncoderPool.checkIn(PrefixTreeEncoderencoder) - - -void EncoderPoolImpl.checkIn(PrefixTreeEncoderencoder) - + static void EncoderFactory.checkIn(PrefixTreeEncoderencoder) + +void +EncoderPool.checkIn(PrefixTreeEncoderencoder) + protected static PrefixTreeEncoder EncoderFactory.prepareEncoder(PrefixTreeEncoderencoder, @@ -201,13 +201,13 @@ protected PrefixTreeEncoder -RowSectionWriter.prefixTreeEncoder +RowNodeWriter.prefixTreeEncoder fields protected PrefixTreeEncoder -RowNodeWriter.prefixTreeEncoder +RowSectionWriter.prefixTreeEncoder fields 
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html index ec91e8f..2b8a872 100644 --- a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html +++ b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html @@ -104,11 +104,11 @@ protected ColumnNodeType -ColumnNodeReader.nodeType
[11/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html index 34cfd66..d078cfc 100644 --- a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html +++ b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html @@ -144,20 +144,20 @@ -static HTableDescriptor -HTableDescriptor.parseFrom(byte[]bytes) - - static HColumnDescriptor HColumnDescriptor.parseFrom(byte[]bytes) + +static HRegionInfo +HRegionInfo.parseFrom(byte[]bytes) + static ClusterId ClusterId.parseFrom(byte[]bytes) -static HRegionInfo -HRegionInfo.parseFrom(byte[]bytes) +static HTableDescriptor +HTableDescriptor.parseFrom(byte[]bytes) static SplitLogTask @@ -253,145 +253,145 @@ ByteArrayComparable.parseFrom(byte[]pbBytes) -static QualifierFilter -QualifierFilter.parseFrom(byte[]pbBytes) +static ColumnPaginationFilter +ColumnPaginationFilter.parseFrom(byte[]pbBytes) -static WhileMatchFilter -WhileMatchFilter.parseFrom(byte[]pbBytes) +static InclusiveStopFilter +InclusiveStopFilter.parseFrom(byte[]pbBytes) -static RandomRowFilter -RandomRowFilter.parseFrom(byte[]pbBytes) +static FilterWrapper +FilterWrapper.parseFrom(byte[]pbBytes) -static ColumnCountGetFilter -ColumnCountGetFilter.parseFrom(byte[]pbBytes) +static BinaryComparator +BinaryComparator.parseFrom(byte[]pbBytes) -static DependentColumnFilter -DependentColumnFilter.parseFrom(byte[]pbBytes) +static WhileMatchFilter +WhileMatchFilter.parseFrom(byte[]pbBytes) -static KeyOnlyFilter -KeyOnlyFilter.parseFrom(byte[]pbBytes) - - static FuzzyRowFilter FuzzyRowFilter.parseFrom(byte[]pbBytes) - -static SingleColumnValueFilter -SingleColumnValueFilter.parseFrom(byte[]pbBytes) - -static FamilyFilter 
-FamilyFilter.parseFrom(byte[]pbBytes) +static FirstKeyValueMatchingQualifiersFilter +FirstKeyValueMatchingQualifiersFilter.parseFrom(byte[]pbBytes) +Deprecated. + -static LongComparator -LongComparator.parseFrom(byte[]pbBytes) +static KeyOnlyFilter +KeyOnlyFilter.parseFrom(byte[]pbBytes) -static MultipleColumnPrefixFilter -MultipleColumnPrefixFilter.parseFrom(byte[]pbBytes) +static RegexStringComparator +RegexStringComparator.parseFrom(byte[]pbBytes) -static FilterList -FilterList.parseFrom(byte[]pbBytes) +static ValueFilter +ValueFilter.parseFrom(byte[]pbBytes) -static BinaryComparator -BinaryComparator.parseFrom(byte[]pbBytes) +static LongComparator +LongComparator.parseFrom(byte[]pbBytes) -static ColumnPrefixFilter -ColumnPrefixFilter.parseFrom(byte[]pbBytes) +static PrefixFilter +PrefixFilter.parseFrom(byte[]pbBytes) -static ColumnRangeFilter -ColumnRangeFilter.parseFrom(byte[]pbBytes) +static RandomRowFilter +RandomRowFilter.parseFrom(byte[]pbBytes) +static SingleColumnValueExcludeFilter +SingleColumnValueExcludeFilter.parseFrom(byte[]pbBytes) + + static SubstringComparator SubstringComparator.parseFrom(byte[]pbBytes) + +static FilterList +FilterList.parseFrom(byte[]pbBytes) + -static PrefixFilter -PrefixFilter.parseFrom(byte[]pbBytes) +static ColumnPrefixFilter +ColumnPrefixFilter.parseFrom(byte[]pbBytes) -static BitComparator -BitComparator.parseFrom(byte[]pbBytes) +static TimestampsFilter +TimestampsFilter.parseFrom(byte[]pbBytes) -static ValueFilter -ValueFilter.parseFrom(byte[]pbBytes) +static RowFilter +RowFilter.parseFrom(byte[]pbBytes) -static Filter -Filter.parseFrom(byte[]pbBytes) -Concrete implementers can signal a failure condition in their code by throwing an - http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException. 
- +static SkipFilter +SkipFilter.parseFrom(byte[]pbBytes) -static MultiRowRangeFilter -MultiRowRangeFilter.parseFrom(byte[]pbBytes) +static BinaryPrefixComparator +BinaryPrefixComparator.parseFrom(byte[]pbBytes) -static FirstKeyOnlyFilter -FirstKeyOnlyFilter.parseFrom(byte[]pbBytes) +static NullComparator +NullComparator.parseFrom(byte[]pbBytes) -static FirstKeyValueMatchingQualifiersFilter -FirstKeyValueMatchingQualifiersFilter.parseFrom(byte[]pbBytes) -Deprecated. - +static SingleColumnValueFilter +SingleColumnValueFilter.parseFrom(byte[]pbBytes) -static PageFilter -PageFilter.parseFrom(byte[]pbBytes) +static FirstKeyOnlyFilter +FirstKeyOnlyFilter.parseFrom(byte[]pbBytes) -static TimestampsFilter -TimestampsFilter.parseFrom(byte[]pbBytes) +static ColumnRangeFilter +ColumnRangeFilter.parseFrom(byte[]pbBytes) -static ColumnPaginationFilter -ColumnPaginationFilter.parseFrom(byte[]pbBytes) +static ColumnCountGetFilter +ColumnCountGetFilter.parseFrom(byte[]pbBytes)
[28/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html index caf1ff9..93ea532 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html @@ -310,26 +310,26 @@ service. HRegionLocation -HRegionLocator.getRegionLocation(byte[]row) +RegionLocator.getRegionLocation(byte[]row) Finds the region on which the given row is being served. HRegionLocation -RegionLocator.getRegionLocation(byte[]row) +HRegionLocator.getRegionLocation(byte[]row) Finds the region on which the given row is being served. HRegionLocation -HRegionLocator.getRegionLocation(byte[]row, +RegionLocator.getRegionLocation(byte[]row, booleanreload) Finds the region on which the given row is being served. HRegionLocation -RegionLocator.getRegionLocation(byte[]row, +HRegionLocator.getRegionLocation(byte[]row, booleanreload) Finds the region on which the given row is being served. @@ -399,14 +399,14 @@ service. http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionLocation -HRegionLocator.getAllRegionLocations() - - -http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionLocation RegionLocator.getAllRegionLocations() Retrieves all of the regions associated with this table. 
+ +http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionLocation +HRegionLocator.getAllRegionLocations() + private Pairhttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionLocation HTable.getKeysAndRegionsInRange(byte[]startKey, @@ -468,16 +468,16 @@ service. -void -MetaCache.cacheLocation(TableNametableName, +private void +ConnectionImplementation.cacheLocation(TableNametableName, ServerNamesource, HRegionLocationlocation) Put a newly discovered HRegionLocation into the cache. -private void -ConnectionImplementation.cacheLocation(TableNametableName, +void +MetaCache.cacheLocation(TableNametableName, ServerNamesource, HRegionLocationlocation) Put a newly discovered HRegionLocation into the cache.
[32/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html index 31ab35d..fa9b375 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html @@ -475,17 +475,23 @@ service. static Cell +CellUtil.createFirstOnRow(byte[]row, +introffset, +shortrlength) + + +static Cell CellUtil.createFirstOnRow(Cellcell) Create a Cell that is smaller than all other possible Cells for the given Cell's row. - + static Cell CellUtil.createFirstOnRowCol(Cellcell) Create a Cell that is smaller than all other possible Cells for the given Cell's row. - + static Cell CellUtil.createFirstOnRowCol(Cellcell, byte[]qArray, @@ -495,26 +501,26 @@ service. passed qualifier. - + static Cell CellUtil.createFirstOnRowColTS(Cellcell, longts) Creates the first cell with the row/family/qualifier of this cell and the given timestamp. - + static Cell CellUtil.createLastOnRow(Cellcell) Create a Cell that is larger than all other possible Cells for the given Cell's row. - + static Cell CellUtil.createLastOnRowCol(Cellcell) Create a Cell that is larger than all other possible Cells for the given Cell's rk:cf:q. - + Cell CellScanner.current() @@ -599,6 +605,16 @@ service. int +CellComparator.compare(Cella, + Cellb) + + +int +CellComparator.RowComparator.compare(Cella, + Cellb) + + +int KeyValue.MetaComparator.compare(Cellleft, Cellright) Deprecated. @@ -623,16 +639,6 @@ service. -int -CellComparator.compare(Cella, - Cellb) - - -int -CellComparator.RowComparator.compare(Cella, - Cellb) - - private int CellComparator.compare(Cella, Cellb, @@ -808,37 +814,37 @@ service. int -KeyValue.KVComparator.compareRows(Cellleft, - Cellright) -Deprecated. - - - -int CellComparator.compareRows(Cellleft, Cellright) Compares the rows of the left and right cell. 
- + int CellComparator.MetaCellComparator.compareRows(Cellleft, Cellright) - + int -KeyValue.KVComparator.compareTimestamps(Cellleft, - Cellright) +KeyValue.KVComparator.compareRows(Cellleft, + Cellright) Deprecated. - + static int CellComparator.compareTimestamps(Cellleft, Cellright) Compares cell's timestamps in DESCENDING order. + +int +KeyValue.KVComparator.compareTimestamps(Cellleft, + Cellright) +Deprecated. + + static int CellComparator.compareValue(Cellcell, @@ -1253,32 +1259,32 @@ service. -boolean -KeyValue.KVComparator.matchingRowColumn(Cellleft, +static boolean +CellUtil.matchingRowColumn(Cellleft, Cellright) -Deprecated. Compares the row and column of two keyvalues for equality -static boolean -CellUtil.matchingRowColumn(Cellleft, +boolean +KeyValue.KVComparator.matchingRowColumn(Cellleft, Cellright) +Deprecated. Compares the row and column of two keyvalues for equality -boolean -KeyValue.KVComparator.matchingRows(Cellleft, +static boolean +CellUtil.matchingRows(Cellleft, Cellright) -Deprecated. Compares the row of two keyvalues for equality -static boolean -CellUtil.matchingRows(Cellleft, +boolean +KeyValue.KVComparator.matchingRows(Cellleft, Cellright) +Deprecated. Compares the row of two keyvalues for equality @@ -1638,17 +1644,17 @@ service. -Put -Put.add(Cellkv) -Add the specified KeyValue to this Put operation. - - - Increment Increment.add(Cellcell) Add the specified KeyValue to this operation. + +Put +Put.add(Cellkv) +Add the specified KeyValue to this Put operation. + + Append Append.add(Cellcell) @@ -1743,26 +1749,26 @@ service. 
booleanpartial) -Put -Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true; title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListCellmap) - - Delete Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true; title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListCellmap) + +Mutation
[01/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
Repository: hbase-site Updated Branches: refs/heads/asf-site 597f30e55 -> b6dc57358 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html index c398436..8a322cc 100644 --- a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html +++ b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html @@ -118,7 +118,7 @@ -public static class HMasterCommandLine.LocalHMaster +public static class HMasterCommandLine.LocalHMaster extends HMaster @@ -234,7 +234,7 @@ extends Methods inherited from classorg.apache.hadoop.hbase.master.HMaster -abort, abortProcedure, addColumn, assignMeta, balance, balance, balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, checkInitialized, checkServiceStarted, checkTableModifiable, configureInfoServer, constructMaster, createNamespace, createRpcServices, createServerManager, createTable, deleteColumn, deleteNamespace, deleteTable, disableTable, dispatchMergingRegions, enableTable, getAssignmentManager, getAverageLoad, getClientIdAuditPrefix, getClusterSchema, getClusterStatus, getDumpServlet, getFsTableDescriptors, getHFileCleaner, getInitializedEvent, getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, getMasterActiveTime, getMasterCoprocessorHost, getMasterCoprocessors, getMasterFileSystem, getMasterMetrics, getMasterProcedureExecutor, getMasterProcedureManagerHost, getMasterQuotaManager, getMasterRpcServices, getMasterStartTime, getMasterWalManager, getMergePlanCount, getMobCompactionState, getNamespace, getNamespaces, getNumWALFiles, getProcessName, getRegionNormalizer, getRegionNormalizerTracker, getRegionServerFatalLogBuffer, 
getRegionServerInfoPort, getRegionServerVersion, getRemoteInetAddress, getServerCrashProcessingEnabledEvent, getServerManager, getServerName, getSnapshotManager, getSplitOrMergeTracker, getSplitPlanCount, getTableDescriptors, getTableRegionForRow, getTableStateManager, getWalProcedureStore, getZooKeeper, initClusterSchemaService, initializeZKBasedSystemTrackers, initQuotaManager, isActiveMaster, isBalancerOn, isCatalogJanitorEnabled, isInitializationStartsMetaRegionAssignment, isInitialized, isNormalizerOn, isServerCrashProcessingEnabled, isSplitOrMergeEnabled, listProcedures, listTableDescriptors, listTableDescriptorsByNamespace, listTableNames, listTableNamesByNamespace, login, main, modifyColumn, < a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html#modifyNamespace(org.apache.hadoop.hbase.NamespaceDescriptor,%20long,%20long)">modifyNamespace, modifyTable, move, normalizeRegions, registerService, reportMobCompactionEnd, reportM obCompactionStart, requestMobCompaction, sendShutdownInterrupt, setCatalogJanitorEnabled, setInitialized, setServerCrashProcessingEnabled, shutdown, stopMaster, stopServiceThreads, href="../../../../../org/apache/hadoop/hbase/master/HMaster.html#truncateTable(org.apache.hadoop.hbase.TableName,%20boolean,%20long,%20long)">truncateTable, > href="../../../../../org/apache/hadoop/hbase/master/HMaster.html#waitForMasterActive()">waitForMasterActive +abort, abortProcedure, addColumn, assignMeta, balance, balance, balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, checkInitialized, checkServiceStarted, checkTableModifiable, configureInfoServer, constructMaster, createNamespace, createRpcServices, createServerManager, createTable, deleteColumn, deleteNamespace, deleteTable, disableTable, dispatchMergingRegions, enableTable, getAssignmentManager, getAverageLoad, getClientIdAuditPrefix, getClusterSchema, getClusterStatus, getDumpServlet, getFsTableDescriptors, getHFileCleaner, getInitializedEvent, 
getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, getMasterActiveTime, getMasterCoprocessorHost, getMasterCoprocessors, getMasterFileSystem, getMasterMetrics, getMasterProcedureExecutor, getMasterProcedureManagerHost, getMasterQuotaManager, getMasterRpcServices, getMasterStartTime, getMasterWalManager, getMergePlanCount, getMobCompactionState, getNamespace, getNamespaces , getNumWALFiles, getProcessName, getRegionNormalizer, getRegionNormalizerTracker, getRegionServerFatalLogBuffer, getRegionServerInfoPort, getRegionServerVersion, getRemoteInetAddress, getServerCrashProcessingEnabledEvent, getServerManager, getServerName, getSnapshotManager, getSplitOrMergeTracker, getSplitPlanCount,
[18/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html index f0e682c..fc49675 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html @@ -241,7 +241,7 @@ the order they are declared. long -Table.incrementColumnValue(byte[]row, +HTable.incrementColumnValue(byte[]row, byte[]family, byte[]qualifier, longamount, @@ -251,7 +251,7 @@ the order they are declared. long -HTable.incrementColumnValue(byte[]row, +Table.incrementColumnValue(byte[]row, byte[]family, byte[]qualifier, longamount, @@ -268,26 +268,26 @@ the order they are declared. Durabilitydurability) -Put -Put.setDurability(Durabilityd) - - Delete Delete.setDurability(Durabilityd) + +Mutation +Mutation.setDurability(Durabilityd) +Set the durability for this mutation + + Increment Increment.setDurability(Durabilityd) -Append -Append.setDurability(Durabilityd) +Put +Put.setDurability(Durabilityd) -Mutation -Mutation.setDurability(Durabilityd) -Set the durability for this mutation - +Append +Append.setDurability(Durabilityd) @@ -448,15 +448,15 @@ the order they are declared. 
Durability -RowProcessor.useDurability() +MultiRowMutationProcessor.useDurability() Durability -MultiRowMutationProcessor.useDurability() +BaseRowProcessor.useDurability() Durability -BaseRowProcessor.useDurability() +RowProcessor.useDurability() http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/class-use/Get.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Get.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Get.html index 16f5dee..3b53cee 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Get.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Get.html @@ -295,13 +295,13 @@ service. boolean -Table.exists(Getget) +HTable.exists(Getget) Test for the existence of columns in the table, as specified by the Get. boolean -HTable.exists(Getget) +Table.exists(Getget) Test for the existence of columns in the table, as specified by the Get. @@ -311,13 +311,13 @@ service. Result -Table.get(Getget) +HTable.get(Getget) Extracts certain cells from a given row. Result -HTable.get(Getget) +Table.get(Getget) Extracts certain cells from a given row. @@ -349,13 +349,13 @@ service. boolean[] -Table.existsAll(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) +HTable.existsAll(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) Test for the existence of columns in the table, as specified by the Gets. boolean[] -HTable.existsAll(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) +Table.existsAll(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) Test for the existence of columns in the table, as specified by the Gets. @@ -365,13 +365,13 @@ service. 
Result[] -Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) +HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) Extracts certain cells from the given rows, in batch. Result[] -HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) +Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) Extracts certain cells from the given rows, in batch. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/class-use/Increment.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Increment.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Increment.html index
[03/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/ipc/class-use/StoppedRpcClientException.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/StoppedRpcClientException.html b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/StoppedRpcClientException.html index 05d70e1..9b4390f 100644 --- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/StoppedRpcClientException.html +++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/StoppedRpcClientException.html @@ -112,17 +112,17 @@ AsyncRpcChannel -RpcClient.createRpcChannel(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringserviceName, +RpcClientImpl.createRpcChannel(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringserviceName, ServerNamesn, -Useruser) -Create or fetch AsyncRpcChannel - +Useruser) AsyncRpcChannel -RpcClientImpl.createRpcChannel(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringserviceName, +RpcClient.createRpcChannel(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringserviceName, ServerNamesn, -Useruser) +Useruser) +Create or fetch AsyncRpcChannel + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/mapred/TableOutputFormat.TableRecordWriter.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/mapred/TableOutputFormat.TableRecordWriter.html b/devapidocs/org/apache/hadoop/hbase/mapred/TableOutputFormat.TableRecordWriter.html index 81dfe27..e32a2ad 100644 --- a/devapidocs/org/apache/hadoop/hbase/mapred/TableOutputFormat.TableRecordWriter.html +++ b/devapidocs/org/apache/hadoop/hbase/mapred/TableOutputFormat.TableRecordWriter.html @@ -128,7 +128,7 @@ implements 
org.apache.hadoop.mapred.RecordWriter private Connection -connection +conn private BufferedMutator @@ -149,13 +149,10 @@ implements org.apache.hadoop.mapred.RecordWriterConstructor and Description -TableOutputFormat.TableRecordWriter(BufferedMutatormutator) -Instantiate a TableRecordWriter with the HBase HClient for writing. +TableOutputFormat.TableRecordWriter(org.apache.hadoop.mapred.JobConfjob) +Instantiate a TableRecordWriter with a BufferedMutator for batch writing. - -TableOutputFormat.TableRecordWriter(org.apache.hadoop.mapred.JobConfjob) - @@ -211,13 +208,13 @@ implements org.apache.hadoop.mapred.RecordWriterBufferedMutator m_mutator - + -connection -privateConnection connection +conn +privateConnection conn @@ -228,28 +225,15 @@ implements org.apache.hadoop.mapred.RecordWriter - - - - -TableOutputFormat.TableRecordWriter -publicTableOutputFormat.TableRecordWriter(BufferedMutatormutator) -throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException -Instantiate a TableRecordWriter with the HBase HClient for writing. Assumes control over the - lifecycle of conn. -Throws: -http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException - - TableOutputFormat.TableRecordWriter -publicTableOutputFormat.TableRecordWriter(org.apache.hadoop.mapred.JobConfjob) +publicTableOutputFormat.TableRecordWriter(org.apache.hadoop.mapred.JobConfjob) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException +Instantiate a TableRecordWriter with a BufferedMutator for batch writing. 
Throws: http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException @@ -268,7 +252,7 @@ implements org.apache.hadoop.mapred.RecordWriter close -publicvoidclose(org.apache.hadoop.mapred.Reporterreporter) +publicvoidclose(org.apache.hadoop.mapred.Reporterreporter) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Specified by: @@ -283,7 +267,7 @@ implements org.apache.hadoop.mapred.RecordWriter write -publicvoidwrite(ImmutableBytesWritablekey, +publicvoidwrite(ImmutableBytesWritablekey, Putvalue) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
[40/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/book.html -- diff --git a/book.html b/book.html index 8244382..28dbf41 100644 --- a/book.html +++ b/book.html @@ -19179,7 +19179,7 @@ using HBase Shell. For more details see Loading Coprocesso Call the coprocessor from your client-side code. HBase handles the coprocessor -trapsparently. +transparently. @@ -25160,6 +25160,24 @@ The master cluster relies on randomization to attempt to balance the stream of r It is expected that the slave cluster has storage capacity to hold the replicated data, as well as any data it is responsible for ingesting. If a slave cluster does run out of room, or is inaccessible for other reasons, it throws an error and the master retains the WAL and retries the replication at intervals. + + + + + + + +Consistency Across Replicated Clusters + +How your application builds on top of the HBase API matters when replication is in play. HBases replication system provides at-least-once delivery of client edits for an enabled column family to each configured destination cluster. In the event of failure to reach a given destination, the replication system will retry sending edits in a way that might repeat a given message. Further more, there is not a guaranteed order of delivery for client edits. In the event of a RegionServer failing, recovery of the replication queue happens independent of recovery of the individual regions that server was previously handling. This means that it is possible for the not-yet-replicated edits to be serviced by a RegionServer that is currently slower to replicate than the one that handles edits from after the failure. + + +The combination of these two properties (at-least-once delivery and the lack of message ordering) means that some destination clusters may end up in a different state if your application makes use of operations that are not idempotent, e.g. Increments. 
+ + + + + @@ -33748,7 +33766,7 @@ The server will return cellblocks compressed using this same compressor as long Version 2.0.0-SNAPSHOT -Last updated 2016-04-11 14:30:43 UTC +Last updated 2016-06-15 16:11:32 UTC http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/bulk-loads.html -- diff --git a/bulk-loads.html b/bulk-loads.html index e7f5511..da2508c 100644 --- a/bulk-loads.html +++ b/bulk-loads.html @@ -7,7 +7,7 @@ - + Apache HBase Bulk Loads in Apache HBase (TM) @@ -305,7 +305,7 @@ under the License. --> http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-06-27 + Last Published: 2016-07-05
[47/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/client/Durability.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/Durability.html b/apidocs/org/apache/hadoop/hbase/client/Durability.html index af1f718..0f065d5 100644 --- a/apidocs/org/apache/hadoop/hbase/client/Durability.html +++ b/apidocs/org/apache/hadoop/hbase/client/Durability.html @@ -280,7 +280,7 @@ the order they are declared. values -public staticDurability[]values() +public staticDurability[]values() Returns an array containing the constants of this enum type, in the order they are declared. This method may be used to iterate over the constants as follows: @@ -297,7 +297,7 @@ for (Durability c : Durability.values()) valueOf -public staticDurabilityvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) +public staticDurabilityvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) Returns the enum constant of this type with the specified name. The string must match exactly an identifier used to declare an enum constant in this type. (Extraneous whitespace characters are http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html index 7dc6ab3..7cc0a93 100644 --- a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html +++ b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html @@ -243,7 +243,7 @@ the order they are declared. values -public staticIsolationLevel[]values() +public staticIsolationLevel[]values() Returns an array containing the constants of this enum type, in the order they are declared. 
This method may be used to iterate over the constants as follows: @@ -260,7 +260,7 @@ for (IsolationLevel c : IsolationLevel.values()) valueOf -public staticIsolationLevelvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) +public staticIsolationLevelvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) Returns the enum constant of this type with the specified name. The string must match exactly an identifier used to declare an enum constant in this type. (Extraneous whitespace characters are http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html index 81ef17a..bcea5a9 100644 --- a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html +++ b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html @@ -233,7 +233,7 @@ the order they are declared. values -public staticSnapshotType[]values() +public staticSnapshotType[]values() Returns an array containing the constants of this enum type, in the order they are declared. This method may be used to iterate over the constants as follows: @@ -250,7 +250,7 @@ for (SnapshotType c : SnapshotType.values()) valueOf -public staticSnapshotTypevalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) +public staticSnapshotTypevalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) Returns the enum constant of this type with the specified name. The string must match exactly an identifier used to declare an enum constant in this type. 
(Extraneous whitespace characters are http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html index f347eea..edbc8bd 100644 --- a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html +++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html @@ -138,19 +138,19 @@ the order they are declared. -Get -Get.setConsistency(Consistencyconsistency) - - Scan Scan.setConsistency(Consistencyconsistency) - + Query Query.setConsistency(Consistencyconsistency) Sets the consistency level for this operation + +Get
[36/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/index-all.html -- diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html index 8327d79..1504f1d 100644 --- a/devapidocs/index-all.html +++ b/devapidocs/index-all.html @@ -322,6 +322,12 @@ AbstractClientScanner() - Constructor for class org.apache.hadoop.hbase.client.AbstractClientScanner +AbstractFileStatusFilter - Class in org.apache.hadoop.hbase.util + +Typical base class for file status filter. + +AbstractFileStatusFilter() - Constructor for class org.apache.hadoop.hbase.util.AbstractFileStatusFilter + AbstractFSWALW - Class in org.apache.hadoop.hbase.regionserver.wal Implementation of WAL to go against FileSystem; i.e. @@ -404,17 +410,30 @@ accept(Path) - Method in class org.apache.hadoop.hbase.mob.mapreduce.SweepReducer.PathPrefixFilter -accept(Path) - Method in class org.apache.hadoop.hbase.util.FSUtils.BlackListDirFilter +accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.AbstractFileStatusFilter + +Filters out a path. + +accept(FileStatus) - Method in class org.apache.hadoop.hbase.util.AbstractFileStatusFilter + +accept(Path) - Method in class org.apache.hadoop.hbase.util.AbstractFileStatusFilter + +accept(FileStatus) - Method in interface org.apache.hadoop.hbase.util.FileStatusFilter + +Tests whether or not the specified filestatus should be + included in a filestatus list. 
+ +accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.FSUtils.BlackListDirFilter -accept(Path) - Method in class org.apache.hadoop.hbase.util.FSUtils.FamilyDirFilter +accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.FSUtils.FamilyDirFilter -accept(Path) - Method in class org.apache.hadoop.hbase.util.FSUtils.FileFilter +accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.FSUtils.FileFilter -accept(Path) - Method in class org.apache.hadoop.hbase.util.FSUtils.HFileFilter +accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.FSUtils.HFileFilter -accept(Path) - Method in class org.apache.hadoop.hbase.util.FSUtils.ReferenceFileFilter +accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.FSUtils.ReferenceFileFilter -accept(Path) - Method in class org.apache.hadoop.hbase.util.FSUtils.RegionDirFilter +accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.FSUtils.RegionDirFilter ACCEPT_POLICY_CONF_KEY - Static variable in class org.apache.hadoop.hbase.thrift.HThreadedSelectorServerArgs @@ -8378,6 +8397,8 @@ checkLength(int, int) - Static method in class org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.CompressedKvDecoder +checkLockExists(String) - Method in class org.apache.hadoop.hbase.replication.ReplicationQueuesZKImpl + checkLogRoll() - Method in class org.apache.hadoop.hbase.regionserver.wal.FSHLog Schedule a log roll if needed. 
@@ -8798,6 +8819,8 @@ chore() - Method in class org.apache.hadoop.hbase.master.cleaner.CleanerChore +chore() - Method in class org.apache.hadoop.hbase.master.cleaner.ReplicationZKLockCleanerChore + chore() - Method in class org.apache.hadoop.hbase.master.ClusterStatusPublisher chore() - Method in class org.apache.hadoop.hbase.master.ExpiredMobFileCleanerChore @@ -11739,8 +11762,6 @@ comparator - Variable in class org.apache.hadoop.hbase.util.CollectionBackedScanner -comparator - Variable in class org.apache.hadoop.hbase.util.ConcurrentIndex.DefaultValueSetFactory - comparator() - Method in class org.apache.hadoop.hbase.util.SortedCopyOnWriteSet comparator - Variable in class org.apache.hadoop.hbase.util.SortedList @@ -12749,29 +12770,6 @@ Overhead for ConcurrentSkipListMap Entry -ConcurrentIndexK,V - Class in org.apache.hadoop.hbase.util - -A simple concurrent map of sets. - -ConcurrentIndex(SupplierSetV) - Constructor for class org.apache.hadoop.hbase.util.ConcurrentIndex - -Creates an instance with a specified factory object for sets to be - associated with a given key. - -ConcurrentIndex(ComparatorV) - Constructor for class org.apache.hadoop.hbase.util.ConcurrentIndex - -Creates an instance using the DefaultValueSetFactory for sets, - which in turn creates instances of http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentSkipListSet.html?is-external=true; title="class or interface in java.util.concurrent">ConcurrentSkipListSet - -ConcurrentIndex.DefaultValueSetFactoryV - Class in org.apache.hadoop.hbase.util - -Default factory class for the sets associated with given keys. - -ConcurrentIndex.DefaultValueSetFactory(ComparatorV) - Constructor for class org.apache.hadoop.hbase.util.ConcurrentIndex.DefaultValueSetFactory - -Creates an instance that passes a specified comparator to the - http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentSkipListSet.html?is-external=true; title="class or
[14/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html index 0dbc5d0..b8db80e 100644 --- a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html +++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html @@ -114,11 +114,11 @@ void -BaseMasterObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) +BaseMasterAndRegionObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) void -BaseMasterAndRegionObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) +BaseMasterObserver.postAbortProcedure(ObserverContextMasterCoprocessorEnvironmentctx) void @@ -128,22 +128,22 @@ void -BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). - Use BaseMasterObserver.postAddColumnFamily(ObserverContext, TableName, HColumnDescriptor). - +Deprecated. void -BaseMasterAndRegionObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. +Deprecated. +As of release 2.0.0, this will be removed in HBase 3.0.0 + (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). + Use BaseMasterObserver.postAddColumnFamily(ObserverContext, TableName, HColumnDescriptor). 
+ @@ -160,13 +160,13 @@ void -BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) void -BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) @@ -180,22 +180,22 @@ void -BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use - BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext, TableName, HColumnDescriptor). - +Deprecated. void -BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HColumnDescriptorcolumnFamily) -Deprecated. +Deprecated. +As of release 2.0.0, this will be removed in HBase 3.0.0 + (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use + BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext, TableName, HColumnDescriptor). 
+ @@ -212,12 +212,12 @@ void -BaseMasterObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) void -BaseMasterAndRegionObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postAddRSGroup(ObserverContextMasterCoprocessorEnvironmentctx, http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) @@ -229,12 +229,12 @@ void -BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
[45/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html b/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html index e08e447..0422f2f 100644 --- a/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html +++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html @@ -113,23 +113,23 @@ http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBytes,Bytes -HTableDescriptor.getValues() -Getter for fetching an unmodifiable HTableDescriptor.values map. - +HColumnDescriptor.getValues() http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBytes,Bytes -HTableDescriptor.getValues() -Getter for fetching an unmodifiable HTableDescriptor.values map. - +HColumnDescriptor.getValues() http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBytes,Bytes -HColumnDescriptor.getValues() +HTableDescriptor.getValues() +Getter for fetching an unmodifiable HTableDescriptor.values map. + http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapBytes,Bytes -HColumnDescriptor.getValues() +HTableDescriptor.getValues() +Getter for fetching an unmodifiable HTableDescriptor.values map. 
+ http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html index 289e2d6..6e1994e 100644 --- a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html +++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html @@ -125,39 +125,39 @@ Order -RawString.getOrder() +FixedLengthWrapper.getOrder() Order -RawDouble.getOrder() +Union2.getOrder() Order -RawInteger.getOrder() +Union3.getOrder() Order -RawFloat.getOrder() +RawShort.getOrder() Order -FixedLengthWrapper.getOrder() +RawLong.getOrder() Order -Union2.getOrder() +RawInteger.getOrder() Order -PBType.getOrder() +RawDouble.getOrder() Order -OrderedBytesBase.getOrder() +RawString.getOrder() Order -TerminatedWrapper.getOrder() +OrderedBytesBase.getOrder() Order @@ -165,34 +165,34 @@ Order -RawLong.getOrder() +Union4.getOrder() Order -RawShort.getOrder() +RawByte.getOrder() Order -RawByte.getOrder() +TerminatedWrapper.getOrder() Order -DataType.getOrder() -Retrieve the sort Order imposed by this data type, or null when - natural ordering is not preserved. - +PBType.getOrder() Order -Struct.getOrder() +RawFloat.getOrder() Order -Union3.getOrder() +DataType.getOrder() +Retrieve the sort Order imposed by this data type, or null when + natural ordering is not preserved. + Order -Union4.getOrder() +Struct.getOrder() http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html b/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html index 268b819..eb232f1 100644 --- a/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html +++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html @@ -169,11 +169,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
protected Pairbyte[][],byte[][] -TableInputFormat.getStartEndKeys() +TableInputFormatBase.getStartEndKeys() protected Pairbyte[][],byte[][] -TableInputFormatBase.getStartEndKeys() +TableInputFormat.getStartEndKeys() http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html index 58b0e89..99a410e 100644 --- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html +++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html @@ -116,105 +116,105 @@ -byte[] -OrderedBlob.decode(PositionedByteRangesrc) +T +FixedLengthWrapper.decode(PositionedByteRangesrc) -http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true; title="class or interface in java.lang">Integer -OrderedInt32.decode(PositionedByteRangesrc) +http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true;
[06/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html index 427e985..c166b11 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html @@ -99,7 +99,7 @@ -static class BucketCache.RAMQueueEntry +static class BucketCache.RAMQueueEntry extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object Block Entry stored in the memory with key,data and so on @@ -218,7 +218,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? key -privateBlockCacheKey key +privateBlockCacheKey key @@ -227,7 +227,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? data -privateCacheable data +privateCacheable data @@ -236,7 +236,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? accessCounter -privatelong accessCounter +privatelong accessCounter @@ -245,7 +245,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? inMemory -privateboolean inMemory +privateboolean inMemory @@ -262,7 +262,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? BucketCache.RAMQueueEntry -publicBucketCache.RAMQueueEntry(BlockCacheKeybck, +publicBucketCache.RAMQueueEntry(BlockCacheKeybck, Cacheabledata, longaccessCounter, booleaninMemory) @@ -282,7 +282,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? getData -publicCacheablegetData() +publicCacheablegetData() @@ -291,7 +291,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? 
getKey -publicBlockCacheKeygetKey() +publicBlockCacheKeygetKey() @@ -300,7 +300,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? access -publicvoidaccess(longaccessCounter) +publicvoidaccess(longaccessCounter) @@ -309,7 +309,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? writeToCache -publicBucketCache.BucketEntrywriteToCache(IOEngineioEngine, +publicBucketCache.BucketEntrywriteToCache(IOEngineioEngine, BucketAllocatorbucketAllocator, UniqueIndexMaphttp://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true; title="class or interface in java.lang">IntegerdeserialiserMap, http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicLongrealCacheSize) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html index 199cf00..f8829c9 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html @@ -108,7 +108,7 @@ -private static class BucketCache.StatisticsThread +private static class BucketCache.StatisticsThread extends http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html?is-external=true; title="class or interface in java.lang">Thread @@ -225,7 +225,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html? bucketCache -private finalBucketCache bucketCache +private finalBucketCache bucketCache @@ -242,7 +242,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html? 
BucketCache.StatisticsThread -publicBucketCache.StatisticsThread(BucketCachebucketCache) +publicBucketCache.StatisticsThread(BucketCachebucketCache) @@ -259,7 +259,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html? run -publicvoidrun() +publicvoidrun() Specified by: http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true#run()" title="class or interface in java.lang">runin interfacehttp://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true; title="class or interface in java.lang">Runnable
[21/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html index b472a02..1d106d1 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html +++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html @@ -109,7 +109,7 @@ @InterfaceAudience.Private @InterfaceStability.Evolving -protected static class HBaseAdmin.ProcedureFutureV +protected static class HBaseAdmin.ProcedureFutureV extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">FutureV Future that waits on a procedure result. 
@@ -314,7 +314,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren exception -privatehttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutionException.html?is-external=true; title="class or interface in java.util.concurrent">ExecutionException exception +privatehttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutionException.html?is-external=true; title="class or interface in java.util.concurrent">ExecutionException exception @@ -323,7 +323,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren procResultFound -privateboolean procResultFound +privateboolean procResultFound @@ -332,7 +332,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren done -privateboolean done +privateboolean done @@ -341,7 +341,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren cancelled -privateboolean cancelled +privateboolean cancelled @@ -350,7 +350,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren result -privateV result +privateV result @@ -359,7 +359,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren admin -private finalHBaseAdmin admin +private finalHBaseAdmin admin @@ -368,7 +368,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren procId -private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">Long procId +private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">Long procId @@ -385,7 +385,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren HBaseAdmin.ProcedureFuture -publicHBaseAdmin.ProcedureFuture(HBaseAdminadmin, +publicHBaseAdmin.ProcedureFuture(HBaseAdminadmin, http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">LongprocId) @@ -403,7 
+403,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren cancel -publicbooleancancel(booleanmayInterruptIfRunning) +publicbooleancancel(booleanmayInterruptIfRunning) Specified by: http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true#cancel(boolean)" title="class or interface in java.util.concurrent">cancelin interfacehttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">FutureV @@ -416,7 +416,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren isCancelled -publicbooleanisCancelled() +publicbooleanisCancelled() Specified by: http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true#isCancelled()" title="class or interface in java.util.concurrent">isCancelledin interfacehttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true; title="class or interface in java.util.concurrent">FutureV @@ -429,7 +429,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/concurren abortProcedureResult -protectedorg.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequestrequest) +protectedorg.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequestrequest) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface
[50/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apache_hbase_reference_guide.pdf -- diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf index e3b8de2..7fd9cd0 100644 --- a/apache_hbase_reference_guide.pdf +++ b/apache_hbase_reference_guide.pdf @@ -5,24 +5,24 @@ /Author (Apache HBase Team) /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1) /Producer (Apache HBase Team) -/CreationDate (D:20160627144300+00'00') -/ModDate (D:20160627144300+00'00') +/CreationDate (D:20160705181933+00'00') +/ModDate (D:20160705181933+00'00') >> endobj 2 0 obj << /Type /Catalog /Pages 3 0 R /Names 25 0 R -/Outlines 4005 0 R -/PageLabels 4210 0 R +/Outlines 4007 0 R +/PageLabels 4212 0 R /PageMode /UseOutlines /ViewerPreferences [/FitWindow] >> endobj 3 0 obj << /Type /Pages -/Count 661 -/Kids [7 0 R 13 0 R 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 39 0 R 43 0 R 47 0 R 58 0 R 62 0 R 64 0 R 66 0 R 68 0 R 75 0 R 78 0 R 80 0 R 85 0 R 88 0 R 90 0 R 92 0 R 101 0 R 107 0 R 112 0 R 114 0 R 131 0 R 137 0 R 144 0 R 146 0 R 150 0 R 153 0 R 164 0 R 172 0 R 188 0 R 192 0 R 196 0 R 198 0 R 202 0 R 208 0 R 210 0 R 212 0 R 214 0 R 216 0 R 219 0 R 225 0 R 227 0 R 229 0 R 231 0 R 233 0 R 235 0 R 237 0 R 239 0 R 243 0 R 247 0 R 249 0 R 251 0 R 253 0 R 255 0 R 257 0 R 259 0 R 261 0 R 267 0 R 270 0 R 272 0 R 274 0 R 276 0 R 281 0 R 286 0 R 291 0 R 294 0 R 298 0 R 313 0 R 324 0 R 331 0 R 341 0 R 352 0 R 357 0 R 359 0 R 361 0 R 366 0 R 380 0 R 385 0 R 388 0 R 393 0 R 397 0 R 408 0 R 420 0 R 435 0 R 441 0 R 443 0 R 445 0 R 452 0 R 463 0 R 474 0 R 485 0 R 488 0 R 491 0 R 495 0 R 499 0 R 502 0 R 505 0 R 507 0 R 510 0 R 514 0 R 516 0 R 520 0 R 524 0 R 530 0 R 534 0 R 536 0 R 542 0 R 544 0 R 548 0 R 556 0 R 558 0 R 561 0 R 564 0 R 567 0 R 570 0 R 585 0 R 592 0 R 599 0 R 610 0 R 617 0 R 626 0 R 634 0 R 637 0 R 641 0 R 644 0 R 657 0 R 665 0 R 671 0 R 676 0 R 680 0 R 682 0 R 696 0 R 708 0 R 714 0 R 720 0 R 723 0 R 731 0 R 739 0 R 744 0 R 749 0 R 754 0 R 756 0 R 758 0 R 
760 0 R 768 0 R 777 0 R 781 0 R 788 0 R 796 0 R 802 0 R 806 0 R 813 0 R 817 0 R 822 0 R 830 0 R 832 0 R 836 0 R 847 0 R 852 0 R 854 0 R 857 0 R 861 0 R 867 0 R 870 0 R 882 0 R 886 0 R 891 0 R 899 0 R 904 0 R 908 0 R 912 0 R 914 0 R 917 0 R 919 0 R 923 0 R 925 0 R 928 0 R 933 0 R 937 0 R 942 0 R 946 0 R 949 0 R 951 0 R 958 0 R 962 0 R 967 0 R 980 0 R 984 0 R 988 0 R 993 0 R 995 0 R 1004 0 R 1007 0 R 1012 0 R 1015 0 R 1024 0 R 1027 0 R 1033 0 R 1040 0 R 1043 0 R 1045 0 R 1054 0 R 1056 0 R 1058 0 R 1061 0 R 1063 0 R 1065 0 R 1067 0 R 1069 0 R 1071 0 R 1074 0 R 1077 0 R 1082 0 R 1085 0 R 1087 0 R 1089 0 R 1091 0 R 1096 0 R 1105 0 R 1108 0 R 1110 0 R 1112 0 R 1117 0 R 1119 0 R 1122 0 R 1124 0 R 1126 0 R 1128 0 R 1131 0 R 1137 0 R 1142 0 R 1149 0 R 1154 0 R 1168 0 R 1179 0 R 1184 0 R 1196 0 R 1205 0 R 1221 0 R 1225 0 R 1 235 0 R 1248 0 R 1251 0 R 1263 0 R 1272 0 R 1280 0 R 1284 0 R 1293 0 R 1298 0 R 1302 0 R 1308 0 R 1314 0 R 1321 0 R 1329 0 R 1331 0 R 1342 0 R 1344 0 R 1349 0 R 1353 0 R 1358 0 R 1368 0 R 1374 0 R 1380 0 R 1382 0 R 1384 0 R 1397 0 R 1404 0 R 1413 0 R 1419 0 R 1433 0 R 1441 0 R 1445 0 R 1454 0 R 1462 0 R 1470 0 R 1476 0 R 1480 0 R 1483 0 R 1485 0 R 1494 0 R 1497 0 R 1504 0 R 1508 0 R 1511 0 R 1519 0 R 1523 0 R 1526 0 R 1528 0 R 1536 0 R 1543 0 R 1549 0 R 1554 0 R 1558 0 R 1561 0 R 1567 0 R 1572 0 R 1577 0 R 1579 0 R 1581 0 R 1584 0 R 1586 0 R 1595 0 R 1598 0 R 1604 0 R 1611 0 R 1615 0 R 1621 0 R 1624 0 R 1626 0 R 1631 0 R 1634 0 R 1636 0 R 1638 0 R 1640 0 R 1647 0 R 1657 0 R 1662 0 R 1669 0 R 1673 0 R 1675 0 R 1677 0 R 1679 0 R 1682 0 R 1684 0 R 1686 0 R 1688 0 R 1692 0 R 1696 0 R 1705 0 R 1707 0 R 1709 0 R 1711 0 R 1713 0 R 1719 0 R 1721 0 R 1726 0 R 1728 0 R 1730 0 R 1737 0 R 1742 0 R 1746 0 R 1750 0 R 1753 0 R 1756 0 R 1760 0 R 1762 0 R 1765 0 R 1767 0 R 1769 0 R 1771 0 R 1775 0 R 1777 0 R 1781 0 R 1783 0 R 1785 0 R 1787 0 R 1789 0 R 1793 0 R 1796 0 R 1798 0 R 1800 0 R 1808 0 R 1818 0 R 1821 0 R 1837 0 R 1852 0 R 1856 0 R 1861 0 R 1864 0 R 1867 0 R 
1872 0 R 1874 0 R 1881 0 R 1883 0 R 1886 0 R 1888 0 R 1890 0 R 1892 0 R 1894 0 R 1898 0 R 1900 0 R 1909 0 R 1916 0 R 1922 0 R 1934 0 R 1948 0 R 1959 0 R 1978 0 R 1980 0 R 1982 0 R 1986 0 R 2003 0 R 2011 0 R 2018 0 R 2027 0 R 2032 0 R 2041 0 R 2052 0 R 2058 0 R 2067 0 R 2080 0 R 2097 0 R 2107 0 R 2110 0 R 2119 0 R 2134 0 R 2141 0 R 2144 0 R 2149 0 R 2154 0 R 2164 0 R 2172 0 R 2175 0 R 2177 0 R 2181 0 R 2194 0 R 2202 0 R 2208 0 R 2212 0 R 2215 0 R 2217 0 R 2219 0 R 2221 0 R 2223 0 R 2228 0 R 2230 0 R 2240 0 R 2250 0 R 2257 0 R 2269 0 R 2274 0 R 2278 0 R 2290 0 R 2297 0 R 2303 0 R 2305 0 R 2316 0 R 2323 0 R 2334 0 R 2338 0 R 2347 0 R 2354 0 R 2364 0 R 2372 0 R 2381 0 R 2387 0 R 2392 0 R 2397 0 R 2400 0 R 2402 0 R 2408 0 R 2412 0 R 2416 0 R 2422 0 R 2429 0 R 2434 0 R 2438 0 R 2447 0 R 2452 0 R 2457 0 R 2470 0 R 2477 0 R 2481 0 R 2486 0 R 2493 0 R 2497 0 R
[51/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c. Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/6444c276 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/6444c276 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/6444c276 Branch: refs/heads/asf-site Commit: 6444c276c803fe4d2fac7f6ae71eb0a96a0c9a7d Parents: 597f30e Author: jenkinsAuthored: Tue Jul 5 18:32:42 2016 + Committer: Sean Busbey Committed: Tue Jul 5 18:48:51 2016 -0500 -- acid-semantics.html | 4 +- apache_hbase_reference_guide.pdf| 18923 + apache_hbase_reference_guide.pdfmarks | 4 +- apidocs/index-all.html | 2 + apidocs/org/apache/hadoop/hbase/CellUtil.html | 177 +- .../apache/hadoop/hbase/KeepDeletedCells.html | 4 +- .../org/apache/hadoop/hbase/class-use/Cell.html | 246 +- .../hadoop/hbase/class-use/TableName.html |10 +- .../hadoop/hbase/client/CompactionState.html| 4 +- .../apache/hadoop/hbase/client/Consistency.html | 4 +- .../apache/hadoop/hbase/client/Durability.html | 4 +- .../hadoop/hbase/client/IsolationLevel.html | 4 +- .../hadoop/hbase/client/SnapshotType.html | 4 +- .../hbase/client/class-use/Consistency.html |10 +- .../hbase/client/class-use/Durability.html |20 +- .../hbase/client/class-use/IsolationLevel.html |10 +- .../hadoop/hbase/client/class-use/Mutation.html | 8 +- .../hadoop/hbase/client/class-use/Result.html |32 +- .../hadoop/hbase/client/class-use/Row.html | 8 +- .../hadoop/hbase/client/class-use/Scan.html |12 +- .../hadoop/hbase/client/package-tree.html | 8 +- .../hbase/filter/CompareFilter.CompareOp.html | 4 +- .../filter/class-use/Filter.ReturnCode.html |62 +- .../hadoop/hbase/filter/class-use/Filter.html |58 +- .../hadoop/hbase/filter/package-tree.html | 2 +- .../io/class-use/ImmutableBytesWritable.html|52 +- .../hadoop/hbase/io/class-use/TimeRange.html| 8 +- .../hbase/io/crypto/class-use/Cipher.html |16 +- .../hbase/io/encoding/DataBlockEncoding.html| 4 +- 
.../hadoop/hbase/mapred/TableOutputFormat.html | 4 +- .../hadoop/hbase/mapreduce/RowCounter.html |10 +- .../hbase/mapreduce/TableInputFormatBase.html |20 +- .../mapreduce/class-use/TableRecordReader.html | 4 +- .../apache/hadoop/hbase/quotas/QuotaType.html | 4 +- .../hadoop/hbase/quotas/package-tree.html | 4 +- .../hadoop/hbase/regionserver/BloomType.html| 4 +- .../apache/hadoop/hbase/util/RegionMover.html |12 +- .../hadoop/hbase/util/class-use/Bytes.html |16 +- .../hadoop/hbase/util/class-use/Order.html |38 +- .../hadoop/hbase/util/class-use/Pair.html | 4 +- .../util/class-use/PositionedByteRange.html | 136 +- apidocs/overview-tree.html |20 +- .../org/apache/hadoop/hbase/CellUtil.html | 1614 +- .../hadoop/hbase/mapred/TableOutputFormat.html | 121 +- .../apache/hadoop/hbase/mapreduce/Import.html | 6 +- .../hadoop/hbase/mapreduce/RowCounter.html | 406 +- .../hbase/mapreduce/TableInputFormatBase.html | 753 +- .../apache/hadoop/hbase/util/RegionMover.html | 1765 +- book.html |22 +- bulk-loads.html | 4 +- checkstyle-aggregate.html | 16712 +++ checkstyle.rss | 664 +- coc.html| 4 +- cygwin.html | 4 +- dependencies.html | 4 +- dependency-convergence.html | 6 +- dependency-info.html| 4 +- dependency-management.html | 4 +- devapidocs/allclasses-frame.html| 6 +- devapidocs/allclasses-noframe.html | 6 +- devapidocs/constant-values.html |28 +- devapidocs/deprecated-list.html | 178 +- devapidocs/index-all.html | 288 +- .../hbase/CellUtil.EmptyByteBufferedCell.html |56 +- .../apache/hadoop/hbase/CellUtil.EmptyCell.html |38 +- .../CellUtil.FirstOnRowByteBufferedCell.html|20 +- .../hadoop/hbase/CellUtil.FirstOnRowCell.html |20 +- .../CellUtil.FirstOnRowColByteBufferedCell.html |28 +- .../hbase/CellUtil.FirstOnRowColCell.html |28 +- ...ellUtil.FirstOnRowColTSByteBufferedCell.html | 8 +- .../hbase/CellUtil.FirstOnRowColTSCell.html | 8 +-
[43/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableOutputFormat.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableOutputFormat.html b/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableOutputFormat.html index 2cffd19..126b093 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableOutputFormat.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableOutputFormat.html @@ -61,66 +61,71 @@ 053 */ 054 protected static class TableRecordWriter implements RecordWriterImmutableBytesWritable, Put { 055private BufferedMutator m_mutator; -056private Connection connection; -057/** -058 * Instantiate a TableRecordWriter with the HBase HClient for writing. Assumes control over the -059 * lifecycle of {@code conn}. +056private Connection conn; +057 +058/** +059 * Instantiate a TableRecordWriter with a BufferedMutator for batch writing. 060 */ -061public TableRecordWriter(final BufferedMutator mutator) throws IOException { -062 this.m_mutator = mutator; -063} -064 -065public TableRecordWriter(JobConf job) throws IOException { -066 // expecting exactly one path -067 TableName tableName = TableName.valueOf(job.get(OUTPUT_TABLE)); -068 connection = ConnectionFactory.createConnection(job); -069 m_mutator = connection.getBufferedMutator(tableName); -070} -071 -072public void close(Reporter reporter) throws IOException { -073 this.m_mutator.close(); -074 if (connection != null) { -075connection.close(); -076connection = null; -077 } -078} -079 -080public void write(ImmutableBytesWritable key, Put value) throws IOException { -081 m_mutator.mutate(new Put(value)); +061public TableRecordWriter(JobConf job) throws IOException { +062 // expecting exactly one path +063 TableName tableName = TableName.valueOf(job.get(OUTPUT_TABLE)); +064 try { +065this.conn = ConnectionFactory.createConnection(job); +066this.m_mutator = conn.getBufferedMutator(tableName); +067 } finally { +068if 
(this.m_mutator == null) { +069 conn.close(); +070 conn = null; +071} +072 } +073} +074 +075public void close(Reporter reporter) throws IOException { +076 if (this.m_mutator != null) { +077this.m_mutator.close(); +078 } +079 if (conn != null) { +080this.conn.close(); +081 } 082} -083 } -084 -085 /** -086 * Creates a new record writer. -087 * -088 * Be aware that the baseline javadoc gives the impression that there is a single -089 * {@link RecordWriter} per job but in HBase, it is more natural if we give you a new -090 * RecordWriter per call of this method. You must close the returned RecordWriter when done. -091 * Failure to do so will drop writes. -092 * -093 * @param ignored Ignored filesystem -094 * @param job Current JobConf -095 * @param name Name of the job -096 * @param progress -097 * @return The newly created writer instance. -098 * @throws IOException When creating the writer fails. -099 */ -100 @Override -101 public RecordWriter getRecordWriter(FileSystem ignored, JobConf job, String name, -102 Progressable progress) -103 throws IOException { -104return new TableRecordWriter(job); -105 } -106 -107 @Override -108 public void checkOutputSpecs(FileSystem ignored, JobConf job) -109 throws FileAlreadyExistsException, InvalidJobConfException, IOException { -110String tableName = job.get(OUTPUT_TABLE); -111if (tableName == null) { -112 throw new IOException("Must specify table name"); -113} -114 } -115} +083 +084public void write(ImmutableBytesWritable key, Put value) throws IOException { +085 m_mutator.mutate(new Put(value)); +086} +087 } +088 +089 /** +090 * Creates a new record writer. +091 * +092 * Be aware that the baseline javadoc gives the impression that there is a single +093 * {@link RecordWriter} per job but in HBase, it is more natural if we give you a new +094 * RecordWriter per call of this method. You must close the returned RecordWriter when done. +095 * Failure to do so will drop writes. 
+096 * +097 * @param ignored Ignored filesystem +098 * @param job Current JobConf +099 * @param name Name of the job +100 * @param progress +101 * @return The newly created writer instance. +102 * @throws IOException When creating the writer fails. +103 */ +104 @Override +105 public RecordWriter getRecordWriter(FileSystem ignored, JobConf job, String name, +106 Progressable progress) +107 throws IOException { +108// Clear write buffer on fail is true by default so no need to reset it. +109return new TableRecordWriter(job); +110 } +111 +112 @Override +113 public
[19/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/Registry.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/Registry.html b/devapidocs/org/apache/hadoop/hbase/client/Registry.html index ddcbcd6..c428352 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/Registry.html +++ b/devapidocs/org/apache/hadoop/hbase/client/Registry.html @@ -151,8 +151,11 @@ interface init -voidinit(Connectionconnection) -Parameters:connection - +voidinit(Connectionconnection) + throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException +Parameters:connection - +Throws: +http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException @@ -174,8 +177,11 @@ interface getClusterId -http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetClusterId() -Returns:Cluster id. +http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetClusterId() +throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException +Returns:Cluster id. 
+Throws: +http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html b/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html index 84ae57d..2bdd366 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html +++ b/devapidocs/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.html @@ -428,7 +428,7 @@ implements singleCallDuration -privatelongsingleCallDuration(longexpectedSleep) +privatelongsingleCallDuration(longexpectedSleep) Returns:Calculate how long a single call took @@ -438,7 +438,7 @@ implements callWithoutRetries -publicTcallWithoutRetries(RetryingCallableTcallable, +publicTcallWithoutRetries(RetryingCallableTcallable, intcallTimeout) throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException, http://docs.oracle.com/javase/7/docs/api/java/lang/RuntimeException.html?is-external=true; title="class or interface in java.lang">RuntimeException @@ -462,7 +462,7 @@ implements translateException -statichttp://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true; title="class or interface in java.lang">ThrowabletranslateException(http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true; title="class or interface in java.lang">Throwablet) +statichttp://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true; title="class or interface in java.lang">ThrowabletranslateException(http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true; title="class or interface in java.lang">Throwablet) throws DoNotRetryIOException Get the good or the remote exception if any, throws the 
DoNotRetryIOException. Parameters:t - the throwable to analyze @@ -477,7 +477,7 @@ implements getTimeout -privateintgetTimeout(intcallTimeout) +privateintgetTimeout(intcallTimeout) @@ -486,7 +486,7 @@ implements toString -publichttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringtoString() +publichttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringtoString() Overrides: http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toStringin classhttp://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.RetryingRPC.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.RetryingRPC.html b/devapidocs/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.RetryingRPC.html index e114790..7a938ae 100644 ---
[10/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html index 2075033..1eef354 100644 --- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html +++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html @@ -174,19 +174,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -Get -Get.setFilter(Filterfilter) - - Query Query.setFilter(Filterfilter) Apply the specified server-side filter when performing the Query. - + Scan Scan.setFilter(Filterfilter) + +Get +Get.setFilter(Filterfilter) + @@ -413,16 +413,16 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -private Filter -WhileMatchFilter.filter +(package private) Filter +FilterWrapper.filter private Filter -SkipFilter.filter +WhileMatchFilter.filter -(package private) Filter -FilterWrapper.filter +private Filter +SkipFilter.filter private Filter @@ -452,15 +452,17 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
static Filter -QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -DependentColumnFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +FilterBase.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +Given the filter's arguments it constructs the filter + static Filter @@ -468,15 +470,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
static Filter -SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter @@ -484,15 +486,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
static Filter -ColumnRangeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in
[49/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apache_hbase_reference_guide.pdfmarks -- diff --git a/apache_hbase_reference_guide.pdfmarks b/apache_hbase_reference_guide.pdfmarks index 2aebcc8..57124b5 100644 --- a/apache_hbase_reference_guide.pdfmarks +++ b/apache_hbase_reference_guide.pdfmarks @@ -2,8 +2,8 @@ /Author (Apache HBase Team) /Subject () /Keywords () - /ModDate (D:2016062711) - /CreationDate (D:2016062711) + /ModDate (D:20160705182124) + /CreationDate (D:20160705182124) /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1) /Producer () /DOCINFO pdfmark http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/index-all.html -- diff --git a/apidocs/index-all.html b/apidocs/index-all.html index 5dd000c..063fc4a 100644 --- a/apidocs/index-all.html +++ b/apidocs/index-all.html @@ -2005,6 +2005,8 @@ Create a Cell that is smaller than all other possible Cells for the given Cell's row. +createFirstOnRow(byte[], int, short) - Static method in class org.apache.hadoop.hbase.CellUtil + createFirstOnRowCol(Cell) - Static method in class org.apache.hadoop.hbase.CellUtil Create a Cell that is smaller than all other possible Cells for the given Cell's row. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/CellUtil.html -- diff --git a/apidocs/org/apache/hadoop/hbase/CellUtil.html b/apidocs/org/apache/hadoop/hbase/CellUtil.html index cfad832..4d09994 100644 --- a/apidocs/org/apache/hadoop/hbase/CellUtil.html +++ b/apidocs/org/apache/hadoop/hbase/CellUtil.html @@ -265,17 +265,23 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? static Cell +createFirstOnRow(byte[]row, +introffset, +shortrlength) + + +static Cell createFirstOnRow(Cellcell) Create a Cell that is smaller than all other possible Cells for the given Cell's row. - + static Cell createFirstOnRowCol(Cellcell) Create a Cell that is smaller than all other possible Cells for the given Cell's row. 
- + static Cell createFirstOnRowCol(Cellcell, byte[]qArray, @@ -285,81 +291,81 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? passed qualifier. - + static Cell createFirstOnRowColTS(Cellcell, longts) Creates the first cell with the row/family/qualifier of this cell and the given timestamp. - + static Cell createLastOnRow(Cellcell) Create a Cell that is larger than all other possible Cells for the given Cell's row. - + static Cell createLastOnRowCol(Cellcell) Create a Cell that is larger than all other possible Cells for the given Cell's rk:cf:q. - + static boolean equals(Cella, Cellb) equals - + static boolean equalsIgnoreMvccVersion(Cella, Cellb) special case for Cell.equals - + static long estimatedHeapSizeOf(Cellcell) This is an estimate of the heap space occupied by a cell. - + static int estimatedSerializedSizeOf(Cellcell) - + static int estimatedSerializedSizeOfKey(Cellcell) - + static ByteRange fillFamilyRange(Cellcell, ByteRangerange) - + static ByteRange fillQualifierRange(Cellcell, ByteRangerange) - + static ByteRange fillRowRange(Cellcell, ByteRangerange) ByteRange - + static ByteRange fillTagRange(Cellcell, ByteRangerange) - + static ByteRange fillValueRange(Cellcell, ByteRangerange) - + static int findCommonPrefixInFlatKey(Cellc1, Cellc2, @@ -369,17 +375,17 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? KeyValue. - + static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String getCellKeyAsString(Cellcell) - + static byte[] getCellKeySerializedAsKeyValueKey(Cellcell) This method exists just to encapsulate how we serialize keys. - + static http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer getQualifierBufferShallowCopy(Cellcell) Deprecated. @@ -387,97 +393,97 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? 
- + static int getRowAsInt(Cellcell) Converts the rowkey bytes of the given cell into an int value - + static byte getRowByte(Cellcell, intindex) misc - + static org.apache.hadoop.hbase.Tag
[33/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/CellUtil.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.html b/devapidocs/org/apache/hadoop/hbase/CellUtil.html index 400e518..d5437a6 100644 --- a/devapidocs/org/apache/hadoop/hbase/CellUtil.html +++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.html @@ -409,17 +409,23 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? static Cell +createFirstOnRow(byte[]row, +introffset, +shortrlength) + + +static Cell createFirstOnRow(Cellcell) Create a Cell that is smaller than all other possible Cells for the given Cell's row. - + static Cell createFirstOnRowCol(Cellcell) Create a Cell that is smaller than all other possible Cells for the given Cell's row. - + static Cell createFirstOnRowCol(Cellcell, byte[]qArray, @@ -429,81 +435,81 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? passed qualifier. - + static Cell createFirstOnRowColTS(Cellcell, longts) Creates the first cell with the row/family/qualifier of this cell and the given timestamp. - + static Cell createLastOnRow(Cellcell) Create a Cell that is larger than all other possible Cells for the given Cell's row. - + static Cell createLastOnRowCol(Cellcell) Create a Cell that is larger than all other possible Cells for the given Cell's rk:cf:q. - + static boolean equals(Cella, Cellb) equals - + static boolean equalsIgnoreMvccVersion(Cella, Cellb) special case for Cell.equals - + static long estimatedHeapSizeOf(Cellcell) This is an estimate of the heap space occupied by a cell. 
- + static int estimatedSerializedSizeOf(Cellcell) - + static int estimatedSerializedSizeOfKey(Cellcell) - + static ByteRange fillFamilyRange(Cellcell, ByteRangerange) - + static ByteRange fillQualifierRange(Cellcell, ByteRangerange) - + static ByteRange fillRowRange(Cellcell, ByteRangerange) ByteRange - + static ByteRange fillTagRange(Cellcell, ByteRangerange) - + static ByteRange fillValueRange(Cellcell, ByteRangerange) - + static int findCommonPrefixInFlatKey(Cellc1, Cellc2, @@ -513,17 +519,17 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? KeyValue. - + static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String getCellKeyAsString(Cellcell) - + static byte[] getCellKeySerializedAsKeyValueKey(Cellcell) This method exists just to encapsulate how we serialize keys. - + static http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer getQualifierBufferShallowCopy(Cellcell) Deprecated. @@ -531,105 +537,105 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? - + static int getRowAsInt(Cellcell) Converts the rowkey bytes of the given cell into an int value - + static byte getRowByte(Cellcell, intindex) misc - + private static int getSumOfCellElementLengths(Cellcell) - + private static int getSumOfCellKeyElementLengths(Cellcell) - + static Tag getTag(Cellcell, bytetype) Retrieve Cell's first tag, matching the passed in type - + static byte[] getTagArray(Cellcell) Returns tag value in a new byte array. 
- + static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTag getTags(Cellcell) - + static http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true; title="class or interface in java.math">BigDecimal getValueAsBigDecimal(Cellcell) Converts the value bytes of the given cell into a BigDecimal - + static double getValueAsDouble(Cellcell) Converts the value bytes of the given cell into a double value - + static long getValueAsLong(Cellcell) Converts the value bytes of the given cell into a long value - + static http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer getValueBufferShallowCopy(Cellcell) - + static boolean isDelete(bytetype) - + static boolean isDelete(Cellcell) - + static boolean isDeleteColumnOrFamily(Cellcell) - + static boolean isDeleteColumns(Cellcell) - + static boolean isDeleteColumnVersion(Cellcell) - + static boolean
[16/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html index 5d12e66..91c48e1 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html @@ -119,23 +119,23 @@ service. boolean -Table.checkAndMutate(byte[]row, +HTable.checkAndMutate(byte[]row, byte[]family, byte[]qualifier, CompareFilter.CompareOpcompareOp, byte[]value, -RowMutationsmutation) +RowMutationsrm) Atomically checks if a row/family/qualifier value matches the expected value. boolean -HTable.checkAndMutate(byte[]row, +Table.checkAndMutate(byte[]row, byte[]family, byte[]qualifier, CompareFilter.CompareOpcompareOp, byte[]value, -RowMutationsrm) +RowMutationsmutation) Atomically checks if a row/family/qualifier value matches the expected value. @@ -150,13 +150,13 @@ service. void -Table.mutateRow(RowMutationsrm) +HTable.mutateRow(RowMutationsrm) Performs multiple mutations atomically on a single row. void -HTable.mutateRow(RowMutationsrm) +Table.mutateRow(RowMutationsrm) Performs multiple mutations atomically on a single row. 
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html index e529f30..c1f6f6d 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html @@ -119,20 +119,20 @@ ResultBoundedCompletionService.retryingCallerFactory -private RpcRetryingCallerFactory -HTable.rpcCallerFactory +protected RpcRetryingCallerFactory +AsyncProcess.rpcCallerFactory private RpcRetryingCallerFactory -HBaseAdmin.rpcCallerFactory +HTable.rpcCallerFactory -protected RpcRetryingCallerFactory -AsyncProcess.rpcCallerFactory +private RpcRetryingCallerFactory +ConnectionImplementation.rpcCallerFactory private RpcRetryingCallerFactory -ConnectionImplementation.rpcCallerFactory +HBaseAdmin.rpcCallerFactory private RpcRetryingCallerFactory http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html index 15b8b53..9c87774 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html @@ -458,15 +458,15 @@ service. ResultScanner -Table.getScanner(Scanscan) -Returns a scanner on the current table as specified by the Scan - object. +HTable.getScanner(Scanscan) +The underlying HTable must not be closed. ResultScanner -HTable.getScanner(Scanscan) -The underlying HTable must not be closed. +Table.getScanner(Scanscan) +Returns a scanner on the current table as specified by the Scan + object. @@ -1063,14 +1063,14 @@ service. 
private Scan -TableRecordReaderImpl.scan - - -private Scan TableInputFormatBase.scan Holds the details for the internal scanner. + +private Scan +TableRecordReaderImpl.scan + @@ -1114,24 +1114,24 @@ service. private static Scan -Export.getConfiguredScanForJob(org.apache.hadoop.conf.Configurationconf, +CellCounter.getConfiguredScanForJob(org.apache.hadoop.conf.Configurationconf, http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String[]args) private static Scan -CellCounter.getConfiguredScanForJob(org.apache.hadoop.conf.Configurationconf, +Export.getConfiguredScanForJob(org.apache.hadoop.conf.Configurationconf, http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String[]args) Scan -TableSplit.getScan() -Returns a Scan object from the stored string
[46/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html -- diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html index af620c0..98310da 100644 --- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html +++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html @@ -148,19 +148,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -Get -Get.setFilter(Filterfilter) - - Scan Scan.setFilter(Filterfilter) - + Query Query.setFilter(Filterfilter) Apply the specified server-side filter when performing the Query. + +Get +Get.setFilter(Filterfilter) + @@ -382,83 +382,83 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. static Filter -ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +FirstKeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) 
+TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -FirstKeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +PageFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter 
-ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) +ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListbyte[]filterArguments) static Filter -KeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in
[07/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html index b956989..29fd28a 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html @@ -367,7 +367,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html? getMaxChunkSize -public staticintgetMaxChunkSize(org.apache.hadoop.conf.Configurationconf) +public staticintgetMaxChunkSize(org.apache.hadoop.conf.Configurationconf) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html index 39654ec..3c14854 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html @@ -103,7 +103,7 @@ -private class LruBlockCache.BlockBucket +private class LruBlockCache.BlockBucket extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableLruBlockCache.BlockBucket Used to group blocks into priority buckets. 
There will be a BlockBucket @@ -239,7 +239,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl name -private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String name +private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String name @@ -248,7 +248,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl queue -privateLruCachedBlockQueue queue +privateLruCachedBlockQueue queue @@ -257,7 +257,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl totalSize -privatelong totalSize +privatelong totalSize @@ -266,7 +266,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl bucketSize -privatelong bucketSize +privatelong bucketSize @@ -283,7 +283,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl LruBlockCache.BlockBucket -publicLruBlockCache.BlockBucket(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname, +publicLruBlockCache.BlockBucket(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname, longbytesToFree, longblockSize, longbucketSize) @@ -303,7 +303,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl add -publicvoidadd(LruCachedBlockblock) +publicvoidadd(LruCachedBlockblock) @@ -312,7 +312,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl free -publiclongfree(longtoFree) +publiclongfree(longtoFree) @@ -321,7 +321,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl overflow -publiclongoverflow() +publiclongoverflow() @@ -330,7 +330,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl totalSize -publiclongtotalSize() +publiclongtotalSize() @@ 
-339,7 +339,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl compareTo -publicintcompareTo(LruBlockCache.BlockBucketthat) +publicintcompareTo(LruBlockCache.BlockBucketthat) Specified by: http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true#compareTo(T)" title="class or interface in java.lang">compareToin interfacehttp://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableLruBlockCache.BlockBucket @@ -352,7 +352,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl equals -publicbooleanequals(http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Objectthat) +publicbooleanequals(http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Objectthat) Overrides:
[41/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html b/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html index 1904857..a23c165 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html @@ -124,888 +124,889 @@ 116private String hostname; 117private String filename; 118private String excludeFile = null; -119private String defaultDir = "/tmp"; -120private int port = HConstants.DEFAULT_REGIONSERVER_PORT; -121 -122/** -123 * @param hostname Hostname to unload regions from or load regions to. Can be either hostname -124 * or hostname:port. -125 */ -126public RegionMoverBuilder(String hostname) { -127 String[] splitHostname = hostname.split(":"); -128 this.hostname = splitHostname[0]; -129 if (splitHostname.length == 2) { -130this.port = Integer.parseInt(splitHostname[1]); -131 } -132 setDefaultfilename(this.hostname); -133} -134 -135private void setDefaultfilename(String hostname) { -136 this.filename = -137 defaultDir + "/" + System.getProperty("user.name") + this.hostname + ":" -138 + Integer.toString(this.port); -139} -140 -141/** -142 * Path of file where regions will be written to during unloading/read from during loading -143 * @param filename -144 * @return RegionMoverBuilder object -145 */ -146public RegionMoverBuilder filename(String filename) { -147 this.filename = filename; -148 return this; -149} -150 -151/** -152 * Set the max number of threads that will be used to move regions -153 */ -154public RegionMoverBuilder maxthreads(int threads) { -155 this.maxthreads = threads; -156 return this; -157} -158 -159/** -160 * Path of file containing hostnames to be excluded during region movement. Exclude file should -161 * have 'host:port' per line. 
Port is mandatory here as we can have many RS running on a single -162 * host. -163 */ -164public RegionMoverBuilder excludeFile(String excludefile) { -165 this.excludeFile = excludefile; -166 return this; -167} -168 -169/** -170 * Set ack/noAck mode. -171 * p -172 * In ack mode regions are acknowledged before and after moving and the move is retried -173 * hbase.move.retries.max times, if unsuccessful we quit with exit code 1.No Ack mode is a best -174 * effort mode,each region movement is tried once.This can be used during graceful shutdown as -175 * even if we have a stuck region,upon shutdown it'll be reassigned anyway. -176 * p -177 * @param ack -178 * @return RegionMoverBuilder object -179 */ -180public RegionMoverBuilder ack(boolean ack) { -181 this.ack = ack; -182 return this; -183} -184 -185/** -186 * Set the timeout for Load/Unload operation in seconds.This is a global timeout,threadpool for -187 * movers also have a separate time which is hbase.move.wait.max * number of regions to -188 * load/unload -189 * @param timeout in seconds -190 * @return RegionMoverBuilder object -191 */ -192public RegionMoverBuilder timeout(int timeout) { -193 this.timeout = timeout; -194 return this; -195} -196 -197/** -198 * This method builds the appropriate RegionMover object which can then be used to load/unload -199 * using load and unload methods -200 * @return RegionMover object -201 */ -202public RegionMover build() { -203 return new RegionMover(this); -204} -205 } -206 -207 /** -208 * Loads the specified {@link #hostname} with regions listed in the {@link #filename} RegionMover -209 * Object has to be created using {@link #RegionMover(RegionMoverBuilder)} -210 * @return true if loading succeeded, false otherwise -211 * @throws ExecutionException -212 * @throws InterruptedException if the loader thread was interrupted -213 * @throws TimeoutException -214 */ -215 public boolean load() throws ExecutionException, InterruptedException, TimeoutException { -216setConf(); 
-217ExecutorService loadPool = Executors.newFixedThreadPool(1); -218FutureBoolean loadTask = loadPool.submit(new Load(this)); -219loadPool.shutdown(); -220try { -221 if (!loadPool.awaitTermination((long) this.timeout, TimeUnit.SECONDS)) { -222LOG.warn("Timed out before finishing the loading operation. Timeout:" + this.timeout -223+ "sec"); -224loadPool.shutdownNow(); -225 } -226} catch (InterruptedException e) { -227 loadPool.shutdownNow(); -228 Thread.currentThread().interrupt(); -229} -230try {
[35/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html b/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html index 9da6302..5269581 100644 --- a/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html +++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html @@ -113,7 +113,7 @@ @InterfaceAudience.Private -private abstract static class CellUtil.EmptyByteBufferedCell +private abstract static class CellUtil.EmptyByteBufferedCell extends ByteBufferedCell @@ -302,7 +302,7 @@ extends CellUtil.EmptyByteBufferedCell -privateCellUtil.EmptyByteBufferedCell() +privateCellUtil.EmptyByteBufferedCell() @@ -319,7 +319,7 @@ extends getRowArray -publicbyte[]getRowArray() +publicbyte[]getRowArray() Description copied from interface:Cell Contiguous raw bytes that may start at any index in the containing array. Max length is Short.MAX_VALUE which is 32,767 bytes. @@ -332,7 +332,7 @@ extends getRowOffset -publicintgetRowOffset() +publicintgetRowOffset() Returns:Array index of first row byte @@ -342,7 +342,7 @@ extends getRowLength -publicshortgetRowLength() +publicshortgetRowLength() Returns:Number of row bytes. Must be rowArray.length - offset. @@ -352,7 +352,7 @@ extends getFamilyArray -publicbyte[]getFamilyArray() +publicbyte[]getFamilyArray() Description copied from interface:Cell Contiguous bytes composed of legal HDFS filename characters which may start at any index in the containing array. Max length is Byte.MAX_VALUE, which is 127 bytes. @@ -365,7 +365,7 @@ extends getFamilyOffset -publicintgetFamilyOffset() +publicintgetFamilyOffset() Returns:Array index of first family byte @@ -375,7 +375,7 @@ extends getFamilyLength -publicbytegetFamilyLength() +publicbytegetFamilyLength() Returns:Number of family bytes. Must be familyArray.length - offset. 
@@ -385,7 +385,7 @@ extends getQualifierArray -publicbyte[]getQualifierArray() +publicbyte[]getQualifierArray() Description copied from interface:Cell Contiguous raw bytes that may start at any index in the containing array. Max length is Short.MAX_VALUE which is 32,767 bytes. @@ -398,7 +398,7 @@ extends getQualifierOffset -publicintgetQualifierOffset() +publicintgetQualifierOffset() Returns:Array index of first qualifier byte @@ -408,7 +408,7 @@ extends getQualifierLength -publicintgetQualifierLength() +publicintgetQualifierLength() Returns:Number of qualifier bytes. Must be qualifierArray.length - offset. @@ -418,7 +418,7 @@ extends getSequenceId -publiclonggetSequenceId() +publiclonggetSequenceId() Description copied from interface:Cell A region-specific unique monotonically increasing sequence ID given to each Cell. It always exists for cells in the memstore but is not retained forever. It will be kept for @@ -433,7 +433,7 @@ extends getValueArray -publicbyte[]getValueArray() +publicbyte[]getValueArray() Description copied from interface:Cell Contiguous raw bytes that may start at any index in the containing array. Max length is Integer.MAX_VALUE which is 2,147,483,648 bytes. @@ -446,7 +446,7 @@ extends getValueOffset -publicintgetValueOffset() +publicintgetValueOffset() Returns:Array index of first value byte @@ -456,7 +456,7 @@ extends getValueLength -publicintgetValueLength() +publicintgetValueLength() Returns:Number of value bytes. Must be valueArray.length - offset. @@ -466,7 +466,7 @@ extends getTagsArray -publicbyte[]getTagsArray() +publicbyte[]getTagsArray() Returns:the tags byte array @@ -476,7 +476,7 @@ extends getTagsOffset -publicintgetTagsOffset() +publicintgetTagsOffset() Returns:the first offset where the tags start in the Cell @@ -486,7 +486,7 @@ extends getTagsLength -publicintgetTagsLength() +publicintgetTagsLength() Returns:the total length of the tags in the Cell. 
@@ -496,7 +496,7 @@ extends getRowByteBuffer -publichttp://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffergetRowByteBuffer() +publichttp://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffergetRowByteBuffer() Specified by: getRowByteBufferin classByteBufferedCell @@ -509,7 +509,7 @@ extends getRowPosition -publicintgetRowPosition() +publicintgetRowPosition() Specified by: getRowPositionin classByteBufferedCell @@ -522,7 +522,7 @@ extends getFamilyByteBuffer -publichttp://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffergetFamilyByteBuffer()
[05/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html index d0ff062..75780a4 100644 --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html @@ -120,11 +120,11 @@ BlockType -HFileBlockDefaultEncodingContext.getBlockType() +HFileBlockEncodingContext.getBlockType() BlockType -HFileBlockEncodingContext.getBlockType() +HFileBlockDefaultEncodingContext.getBlockType() @@ -137,14 +137,14 @@ void -HFileBlockDefaultEncodingContext.postEncoding(BlockTypeblockType) - - -void HFileBlockEncodingContext.postEncoding(BlockTypeblockType) Do any action that needs to be performed after the encoding. + +void +HFileBlockDefaultEncodingContext.postEncoding(BlockTypeblockType) + @@ -194,32 +194,32 @@ BlockType -CachedBlock.getBlockType() - - -BlockType HFileBlock.getBlockType() - + BlockType HFileBlock.BlockWritable.getBlockType() The type of block this data should use. - + BlockType -CompoundBloomFilterWriter.getInlineBlockType() +CachedBlock.getBlockType() - + BlockType InlineBlockWriter.getInlineBlockType() The type of blocks this block writer produces. - + BlockType HFileBlockIndex.BlockIndexWriter.getInlineBlockType() + +BlockType +CompoundBloomFilterWriter.getInlineBlockType() + static BlockType BlockType.parse(byte[]buf, @@ -276,26 +276,26 @@ the order they are declared. 
void -HFileDataBlockEncoder.endBlockEncoding(HFileBlockEncodingContextencodingCtx, +NoOpDataBlockEncoder.endBlockEncoding(HFileBlockEncodingContextencodingCtx, http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true; title="class or interface in java.io">DataOutputStreamout, byte[]uncompressedBytesWithHeader, -BlockTypeblockType) -Ends encoding for a block of KeyValues. - +BlockTypeblockType) void -NoOpDataBlockEncoder.endBlockEncoding(HFileBlockEncodingContextencodingCtx, +HFileDataBlockEncoderImpl.endBlockEncoding(HFileBlockEncodingContextencodingCtx, http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true; title="class or interface in java.io">DataOutputStreamout, byte[]uncompressedBytesWithHeader, BlockTypeblockType) void -HFileDataBlockEncoderImpl.endBlockEncoding(HFileBlockEncodingContextencodingCtx, +HFileDataBlockEncoder.endBlockEncoding(HFileBlockEncodingContextencodingCtx, http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true; title="class or interface in java.io">DataOutputStreamout, byte[]uncompressedBytesWithHeader, -BlockTypeblockType) +BlockTypeblockType) +Ends encoding for a block of KeyValues. + private http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true; title="class or interface in java.io">DataInput @@ -347,27 +347,27 @@ the order they are declared. HFileBlock -HFileReaderImpl.readBlock(longdataBlockOffset, +HFile.CachingBlockReader.readBlock(longoffset, longonDiskBlockSize, booleancacheBlock, booleanpread, booleanisCompaction, booleanupdateCacheMetrics, BlockTypeexpectedBlockType, - DataBlockEncodingexpectedDataBlockEncoding) + DataBlockEncodingexpectedDataBlockEncoding) +Read in a file block. 
+ HFileBlock -HFile.CachingBlockReader.readBlock(longoffset, +HFileReaderImpl.readBlock(longdataBlockOffset, longonDiskBlockSize, booleancacheBlock, booleanpread, booleanisCompaction, booleanupdateCacheMetrics, BlockTypeexpectedBlockType, - DataBlockEncodingexpectedDataBlockEncoding) -Read in a file block. - + DataBlockEncodingexpectedDataBlockEncoding) private void http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html index 61fa959..ce7bde4 100644 ---
[02/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/master/HMaster.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html index ead9b52..57098aa 100644 --- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html +++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html @@ -114,7 +114,7 @@ @InterfaceAudience.LimitedPrivate(value="Tools") -public class HMaster +public class HMaster extends HRegionServer implements MasterServices HMaster is the "master server" for HBase. An HBase cluster has one active @@ -359,38 +359,42 @@ implements regionServerTracker +private ReplicationZKLockCleanerChore +replicationZKLockCleanerChore + + (package private) MemoryBoundedLogMessageBuffer rsFatals - + private MasterProcedureScheduler.ProcedureEvent serverCrashProcessingEnabled - + private ServerManager serverManager - + (package private) boolean serviceStarted - + (package private) SnapshotManager snapshotManager - + private SplitOrMergeTracker splitOrMergeTracker - + private long splitPlanCount - + private TableStateManager tableStateManager - + private MasterWalManager walManager @@ -556,8 +560,7 @@ implements (package private) ServerManager -createServerManager(Servermaster, - MasterServicesservices) +createServerManager(MasterServicesmaster) Create a ServerManager instance. 
@@ -1211,7 +1214,7 @@ implements LOG -private static finalorg.apache.commons.logging.Log LOG +private static finalorg.apache.commons.logging.Log LOG @@ -1220,7 +1223,7 @@ implements MASTER -public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String MASTER +public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String MASTER See Also:Constant Field Values @@ -1230,7 +1233,7 @@ implements activeMasterManager -private finalActiveMasterManager activeMasterManager +private finalActiveMasterManager activeMasterManager @@ -1239,7 +1242,7 @@ implements regionServerTracker -RegionServerTracker regionServerTracker +RegionServerTracker regionServerTracker @@ -1248,7 +1251,7 @@ implements drainingServerTracker -privateDrainingServerTracker drainingServerTracker +privateDrainingServerTracker drainingServerTracker @@ -1257,7 +1260,7 @@ implements loadBalancerTracker -LoadBalancerTracker loadBalancerTracker +LoadBalancerTracker loadBalancerTracker @@ -1266,7 +1269,7 @@ implements splitOrMergeTracker -privateSplitOrMergeTracker splitOrMergeTracker +privateSplitOrMergeTracker splitOrMergeTracker @@ -1275,7 +1278,7 @@ implements regionNormalizerTracker -privateRegionNormalizerTracker regionNormalizerTracker +privateRegionNormalizerTracker regionNormalizerTracker @@ -1284,7 +1287,7 @@ implements clusterSchemaService -privateClusterSchemaService clusterSchemaService +privateClusterSchemaService clusterSchemaService @@ -1293,7 +1296,7 @@ implements metricsMaster -finalMetricsMaster metricsMaster +finalMetricsMaster metricsMaster @@ -1302,7 +1305,7 @@ implements fileSystemManager -privateMasterFileSystem fileSystemManager +privateMasterFileSystem fileSystemManager @@ -1311,7 +1314,7 @@ implements walManager -privateMasterWalManager walManager +privateMasterWalManager walManager @@ -1320,7 +1323,7 @@ implements 
serverManager -private volatileServerManager serverManager +private volatileServerManager serverManager @@ -1329,7 +1332,7 @@ implements assignmentManager -privateAssignmentManager assignmentManager +privateAssignmentManager assignmentManager @@ -1338,7 +1341,7 @@ implements rsFatals -MemoryBoundedLogMessageBuffer rsFatals +MemoryBoundedLogMessageBuffer rsFatals @@ -1347,7 +1350,7 @@ implements isActiveMaster -private volatileboolean isActiveMaster +private volatileboolean isActiveMaster @@ -1356,7 +1359,7 @@ implements initialized -private finalMasterProcedureScheduler.ProcedureEvent initialized +private finalMasterProcedureScheduler.ProcedureEvent initialized @@ -1365,7 +1368,7 @@ implements serviceStarted -volatileboolean serviceStarted +volatileboolean serviceStarted @@ -1374,7 +1377,7 @@ implements serverCrashProcessingEnabled -private finalMasterProcedureScheduler.ProcedureEvent serverCrashProcessingEnabled +private finalMasterProcedureScheduler.ProcedureEvent serverCrashProcessingEnabled @@ -1383,7 +1386,7 @@ implements balancer -privateLoadBalancer balancer +privateLoadBalancer balancer @@ -1392,7 +1395,7 @@ implements normalizer -privateRegionNormalizer normalizer +privateRegionNormalizer
[17/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html index 90229a4..79477f4 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html @@ -381,13 +381,13 @@ service. Result -Table.append(Appendappend) +HTable.append(Appendappend) Appends values to one or more columns within a single row. Result -HTable.append(Appendappend) +Table.append(Appendappend) Appends values to one or more columns within a single row. @@ -480,13 +480,13 @@ service. Result -Table.get(Getget) +HTable.get(Getget) Extracts certain cells from a given row. Result -HTable.get(Getget) +Table.get(Getget) Extracts certain cells from a given row. @@ -501,13 +501,13 @@ service. Result[] -Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) +HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) Extracts certain cells from the given rows, in batch. Result[] -HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) +Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListGetgets) Extracts certain cells from the given rows, in batch. @@ -517,13 +517,13 @@ service. Result -Table.increment(Incrementincrement) +HTable.increment(Incrementincrement) Increments one or more columns within a single row. Result -HTable.increment(Incrementincrement) +Table.increment(Incrementincrement) Increments one or more columns within a single row. @@ -533,43 +533,43 @@ service. 
Result -ClientSimpleScanner.next() +ClientAsyncPrefetchScanner.next() Result -ClientSmallReversedScanner.next() +ResultScanner.next() +Grab the next row's worth of values. + Result -ClientSmallScanner.next() +ClientSmallReversedScanner.next() Result -ClientAsyncPrefetchScanner.next() +ClientSimpleScanner.next() Result -ResultScanner.next() -Grab the next row's worth of values. - +ClientSmallScanner.next() Result -ClientSideRegionScanner.next() +TableSnapshotScanner.next() Result -TableSnapshotScanner.next() +ClientSideRegionScanner.next() Result[] -AbstractClientScanner.next(intnbRows) -Get nbRows rows. - +ResultScanner.next(intnbRows) Result[] -ResultScanner.next(intnbRows) +AbstractClientScanner.next(intnbRows) +Get nbRows rows. + protected Result @@ -893,11 +893,11 @@ service. Result -TableSnapshotInputFormat.TableSnapshotRecordReader.createValue() +TableRecordReader.createValue() Result -TableRecordReader.createValue() +TableSnapshotInputFormat.TableSnapshotRecordReader.createValue() Result @@ -974,12 +974,12 @@ service. boolean -TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey, +TableRecordReader.next(ImmutableBytesWritablekey, Resultvalue) boolean -TableRecordReader.next(ImmutableBytesWritablekey, +TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey, Resultvalue) @@ -1049,11 +1049,11 @@ service. private Result -TableRecordReaderImpl.value +MultithreadedTableMapper.SubMapRecordReader.value private Result -MultithreadedTableMapper.SubMapRecordReader.value +TableRecordReaderImpl.value @@ -1095,27 +1095,27 @@ service. Result -TableSnapshotInputFormat.TableSnapshotRegionRecordReader.getCurrentValue() +TableRecordReader.getCurrentValue() +Returns the current value. + Result -TableSnapshotInputFormatImpl.RecordReader.getCurrentValue() +MultithreadedTableMapper.SubMapRecordReader.getCurrentValue() Result -TableRecordReader.getCurrentValue() -Returns the current value. 
- +TableSnapshotInputFormat.TableSnapshotRegionRecordReader.getCurrentValue() Result -TableRecordReaderImpl.getCurrentValue() -Returns the current value. - +TableSnapshotInputFormatImpl.RecordReader.getCurrentValue() Result -MultithreadedTableMapper.SubMapRecordReader.getCurrentValue() +TableRecordReaderImpl.getCurrentValue() +Returns the current value. + @@ -1226,18 +1226,18 @@ service. void -IdentityTableMapper.map(ImmutableBytesWritablekey, - Resultvalue, +RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow, + Resultvalues, org.apache.hadoop.mapreduce.Mapper.Contextcontext) -Pass the key, value to reduce. +Maps the data. void -RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow, - Resultvalues,
[37/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/coc.html -- diff --git a/coc.html b/coc.html index bbc1ac1..f1487a3 100644 --- a/coc.html +++ b/coc.html @@ -7,7 +7,7 @@ - + Apache HBase Code of Conduct Policy @@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC may opt to skip early http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-06-27 + Last Published: 2016-07-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/cygwin.html -- diff --git a/cygwin.html b/cygwin.html index dd1fb2d..113e850 100644 --- a/cygwin.html +++ b/cygwin.html @@ -7,7 +7,7 @@ - + Apache HBase Installing Apache HBase (TM) on Windows using Cygwin @@ -673,7 +673,7 @@ Now your HBase server is running, start coding and build that next http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-06-27 + Last Published: 2016-07-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/dependencies.html -- diff --git a/dependencies.html b/dependencies.html index d9a8e3a..a0a6785 100644 --- a/dependencies.html +++ b/dependencies.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependencies @@ -518,7 +518,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-06-27 + Last Published: 2016-07-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/dependency-convergence.html -- diff --git a/dependency-convergence.html b/dependency-convergence.html index 01639c7..d537710 100644 --- a/dependency-convergence.html +++ b/dependency-convergence.html @@ -7,7 +7,7 @@ - + Apache HBase Reactor Dependency Convergence @@ -1639,7 +1639,7 @@ -2.1.0 +2.1.0.1 http://hbase.apache.org/hbase-server;>org.apache.hbase:hbase-server @@ -1743,7 +1743,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. 
- Last Published: 2016-06-27 + Last Published: 2016-07-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/dependency-info.html -- diff --git a/dependency-info.html b/dependency-info.html index 0a15930..f88c311 100644 --- a/dependency-info.html +++ b/dependency-info.html @@ -7,7 +7,7 @@ - + Apache HBase Dependency Information @@ -312,7 +312,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-06-27 + Last Published: 2016-07-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/dependency-management.html -- diff --git a/dependency-management.html b/dependency-management.html index 052ed02..e30ce3f 100644 --- a/dependency-management.html +++ b/dependency-management.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependency Management @@ -816,7 +816,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-06-27 + Last Published: 2016-07-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/allclasses-frame.html -- diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html index 6c44a4e..45310b4 100644 --- a/devapidocs/allclasses-frame.html +++ b/devapidocs/allclasses-frame.html @@ -13,6 +13,7 @@ Abortable AbstractByteRange AbstractClientScanner +AbstractFileStatusFilter AbstractFSWAL AbstractFSWALProvider AbstractFSWALProvider.Reader @@ -432,8 +433,6 @@ CompressionTest Compressor ConcatenatedLists -ConcurrentIndex
[26/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html index 4f4468b..e9a6b35 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html @@ -159,14 +159,14 @@ the order they are declared. private KeepDeletedCells -ScanInfo.keepDeletedCells - - -private KeepDeletedCells ScanQueryMatcher.keepDeletedCells whether to return deleted rows + +private KeepDeletedCells +ScanInfo.keepDeletedCells + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html index 6cbd252..025a9fd 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html @@ -122,11 +122,11 @@ private KeyValue.KeyOnlyKeyValue -StoreFileWriter.lastBloomKeyOnlyKV +StoreFileReader.lastBloomKeyOnlyKV private KeyValue.KeyOnlyKeyValue -StoreFileReader.lastBloomKeyOnlyKV +StoreFileWriter.lastBloomKeyOnlyKV http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html index 350221f..4cec977 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html @@ -201,22 +201,22 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
static KeyValue -KeyValue.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true; title="class or interface in java.io">DataInputin) +KeyValueUtil.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true; title="class or interface in java.io">DataInputin) static KeyValue -KeyValueUtil.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true; title="class or interface in java.io">DataInputin) +KeyValue.create(http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true; title="class or interface in java.io">DataInputin) static KeyValue -KeyValue.create(intlength, +KeyValueUtil.create(intlength, http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true; title="class or interface in java.io">DataInputin) Create a KeyValue reading length from in static KeyValue -KeyValueUtil.create(intlength, +KeyValue.create(intlength, http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true; title="class or interface in java.io">DataInputin) Create a KeyValue reading length from in @@ -332,31 +332,31 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
static KeyValue -KeyValue.createKeyValueFromKey(byte[]b) +KeyValueUtil.createKeyValueFromKey(byte[]b) static KeyValue -KeyValueUtil.createKeyValueFromKey(byte[]b) +KeyValue.createKeyValueFromKey(byte[]b) static KeyValue -KeyValue.createKeyValueFromKey(byte[]b, +KeyValueUtil.createKeyValueFromKey(byte[]b, into, intl) static KeyValue -KeyValueUtil.createKeyValueFromKey(byte[]b, +KeyValue.createKeyValueFromKey(byte[]b, into, intl) static KeyValue -KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBufferbb) +KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBufferbb) static KeyValue -KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBufferbb) +KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBufferbb) static KeyValue @@ -520,17 +520,17 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. static long
[27/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html index b2596d0..a3915ba 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html @@ -564,24 +564,24 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. HTableDescriptor -Table.getTableDescriptor() -Gets the table descriptor for this table. - - - -HTableDescriptor HTable.getTableDescriptor() Gets the table descriptor for this table. - + protected HTableDescriptor HBaseAdmin.CreateTableFuture.getTableDescriptor() - + protected HTableDescriptor HBaseAdmin.TableFuture.getTableDescriptor() + +HTableDescriptor +Table.getTableDescriptor() +Gets the table descriptor for this table. + + HTableDescriptor HTableWrapper.getTableDescriptor() @@ -981,13 +981,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. void -BaseMasterObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionsnapshot, HTableDescriptorhTableDescriptor) void -BaseMasterAndRegionObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionsnapshot, HTableDescriptorhTableDescriptor) @@ -1001,13 +1001,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
void -BaseMasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) void -BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) @@ -1021,13 +1021,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. void -BaseMasterObserver.postCompletedModifyTableAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCompletedModifyTableAction(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HTableDescriptorhtd) void -BaseMasterAndRegionObserver.postCompletedModifyTableAction(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCompletedModifyTableAction(ObserverContextMasterCoprocessorEnvironmentctx, TableNametableName, HTableDescriptorhtd) @@ -1041,13 +1041,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. void -BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) void -BaseMasterAndRegionObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) @@ -1061,22 +1061,22 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
void -BaseMasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx, HTableDescriptordesc, HRegionInfo[]regions) -Deprecated. -As of release 2.0.0, this will be removed in HBase 3.0.0 - (https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575). - Use
[24/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html index 689e784..4792ceb 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html @@ -591,11 +591,11 @@ private ServerName -FastFailInterceptorContext.server +AsyncProcess.AsyncRequestFutureImpl.SingleServerRequestRunnable.server private ServerName -AsyncProcess.AsyncRequestFutureImpl.SingleServerRequestRunnable.server +FastFailInterceptorContext.server private ServerName @@ -698,16 +698,16 @@ -void -MetaCache.cacheLocation(TableNametableName, +private void +ConnectionImplementation.cacheLocation(TableNametableName, ServerNamesource, HRegionLocationlocation) Put a newly discovered HRegionLocation into the cache. -private void -ConnectionImplementation.cacheLocation(TableNametableName, +void +MetaCache.cacheLocation(TableNametableName, ServerNamesource, HRegionLocationlocation) Put a newly discovered HRegionLocation into the cache. @@ -911,22 +911,22 @@ boolean -ConnectionImplementation.isDeadServer(ServerNamesn) +ClusterStatusListener.isDeadServer(ServerNamesn) +Check if we know if a server is dead. + boolean +ConnectionImplementation.isDeadServer(ServerNamesn) + + +boolean ClusterConnection.isDeadServer(ServerNameserverName) Deprecated. internal method, do not use thru ClusterConnection - -boolean -ClusterStatusListener.isDeadServer(ServerNamesn) -Check if we know if a server is dead. 
- - protected boolean PreemptiveFastFailInterceptor.isServerInFailureMap(ServerNameserverName) @@ -1248,13 +1248,13 @@ long -ClientBackoffPolicy.getBackoffTime(ServerNameserverName, +ClientBackoffPolicyFactory.NoBackoffPolicy.getBackoffTime(ServerNameserverName, byte[]region, ServerStatisticsstats) long -ClientBackoffPolicyFactory.NoBackoffPolicy.getBackoffTime(ServerNameserverName, +ClientBackoffPolicy.getBackoffTime(ServerNameserverName, byte[]region, ServerStatisticsstats) @@ -1379,14 +1379,14 @@ void -BaseMasterObserver.postMove(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.postMove(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegion, ServerNamesrcServer, ServerNamedestServer) void -BaseMasterAndRegionObserver.postMove(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.postMove(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegion, ServerNamesrcServer, ServerNamedestServer) @@ -1402,14 +1402,14 @@ void -BaseMasterObserver.preMove(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterAndRegionObserver.preMove(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegion, ServerNamesrcServer, ServerNamedestServer) void -BaseMasterAndRegionObserver.preMove(ObserverContextMasterCoprocessorEnvironmentctx, +BaseMasterObserver.preMove(ObserverContextMasterCoprocessorEnvironmentctx, HRegionInforegion, ServerNamesrcServer, ServerNamedestServer) @@ -1501,30 +1501,30 @@ void -RpcClient.cancelConnections(ServerNamesn) -Interrupt the connections to the given server. +RpcClientImpl.cancelConnections(ServerNamesn) +Interrupt the connections to the given ip:port server. void -RpcClientImpl.cancelConnections(ServerNamesn) -Interrupt the connections to the given ip:port server. +RpcClient.cancelConnections(ServerNamesn) +Interrupt the connections to the given server. 
com.google.protobuf.BlockingRpcChannel +AbstractRpcClient.createBlockingRpcChannel(ServerNamesn, +Userticket, + intdefaultOperationTimeout) + + +com.google.protobuf.BlockingRpcChannel RpcClient.createBlockingRpcChannel(ServerNamesn, Useruser, intrpcTimeout) Creates a "channel" that can be used by a blocking protobuf service. - -com.google.protobuf.BlockingRpcChannel -AbstractRpcClient.createBlockingRpcChannel(ServerNamesn, -Userticket, - intdefaultOperationTimeout) - com.google.protobuf.RpcChannel AsyncRpcClient.createProtobufRpcChannel(ServerNamesn, @@ -1533,17 +1533,17 @@
[22/52] [partial] hbase-site git commit: Published site at 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6444c276/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html index c9d1384..17a77fc 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html @@ -767,17 +767,17 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTag -VisibilityLabelService.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringvisExpression, +DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringvisExpression, booleanwithSerializationFormat, - booleancheckAuths) -Creates tags corresponding to given visibility expression. - + booleancheckAuths) http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTag -DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringvisExpression, +VisibilityLabelService.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringvisExpression, booleanwithSerializationFormat, - booleancheckAuths) + booleancheckAuths) +Creates tags corresponding to given visibility expression. 
+ static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTag @@ -823,6 +823,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. byte[] +DefaultVisibilityLabelServiceImpl.encodeVisibilityForReplication(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTagtags, +http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true; title="class or interface in java.lang">ByteserializationFormat) + + +byte[] VisibilityLabelService.encodeVisibilityForReplication(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTagvisTags, http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true; title="class or interface in java.lang">ByteserializationFormat) Provides a way to modify the visibility tags of type TagType @@ -832,11 +837,6 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. .replicate(). - -byte[] -DefaultVisibilityLabelServiceImpl.encodeVisibilityForReplication(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTagtags, -http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true; title="class or interface in java.lang">ByteserializationFormat) - static http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true; title="class or interface in java.lang">Byte VisibilityUtils.extractAndPartitionTags(Cellcell, @@ -882,6 +882,20 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
boolean +DefaultVisibilityLabelServiceImpl.matchVisibility(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTagputVisTags, + http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true; title="class or interface in java.lang">ByteputTagsFormat, + http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTagdeleteVisTags, + http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true; title="class or interface in java.lang">BytedeleteTagsFormat) + + +boolean +DefaultVisibilityLabelServiceImpl.matchVisibility(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true; title="class or interface in
hbase git commit: HBASE-16087 Replication shouldn't start on a master if it only hosts system tables
Repository: hbase Updated Branches: refs/heads/master 20a99b4c0 -> ae92668dd HBASE-16087 Replication shouldn't start on a master if if only hosts system tables Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ae92668d Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ae92668d Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ae92668d Branch: refs/heads/master Commit: ae92668dd6eff5271ceeecc435165f5fc14fab48 Parents: 20a99b4 Author: Elliott ClarkAuthored: Wed Jun 22 15:40:28 2016 -0700 Committer: Elliott Clark Committed: Tue Jul 5 13:50:37 2016 -0700 -- .../hadoop/hbase/master/balancer/BaseLoadBalancer.java | 13 + .../hadoop/hbase/regionserver/HRegionServer.java | 6 ++ 2 files changed, 19 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/ae92668d/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java index f52dbdf..dc5bace 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java @@ -1016,6 +1016,19 @@ public abstract class BaseLoadBalancer implements LoadBalancer { return tables != null && tables.length > 0; } + public static boolean userTablesOnMaster(Configuration conf) { +String[] tables = getTablesOnMaster(conf); +if (tables == null || tables.length == 0) { + return false; +} +for (String tn:tables) { + if (!tn.startsWith("hbase:")) { +return true; + } +} +return false; + } + @Override public void setConf(Configuration conf) { setSlop(conf); http://git-wip-us.apache.org/repos/asf/hbase/blob/ae92668d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -- diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 1c1000e..877c3db 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -118,6 +118,7 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.RegionState.State; import org.apache.hadoop.hbase.master.TableLockManager; +import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; import org.apache.hadoop.hbase.mob.MobCacheConfig; import org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -2651,6 +2652,11 @@ public class HRegionServer extends HasThread implements static private void createNewReplicationInstance(Configuration conf, HRegionServer server, FileSystem fs, Path logDir, Path oldLogDir) throws IOException{ +if ((server instanceof HMaster) && +(!BaseLoadBalancer.userTablesOnMaster(conf))) { + return; +} + // read in the name of the source replication class from the config file. String sourceClassname = conf.get(HConstants.REPLICATION_SOURCE_SERVICE_CLASSNAME, HConstants.REPLICATION_SERVICE_CLASSNAME_DEFAULT);
hbase git commit: HBASE-16087 Replication shouldn't start on a master if it only hosts system tables
Repository: hbase Updated Branches: refs/heads/branch-1.3 603decdbf -> 59c5900fa HBASE-16087 Replication shouldn't start on a master if if only hosts system tables Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59c5900f Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59c5900f Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59c5900f Branch: refs/heads/branch-1.3 Commit: 59c5900fae4392b9a5fcca8dbf5543e1bea1e452 Parents: 603decd Author: Elliott ClarkAuthored: Wed Jun 22 15:40:28 2016 -0700 Committer: Elliott Clark Committed: Tue Jul 5 13:48:28 2016 -0700 -- .../hbase/master/balancer/BaseLoadBalancer.java | 13 + .../hadoop/hbase/regionserver/HRegionServer.java | 18 -- 2 files changed, 25 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/59c5900f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java index b446172..c2529a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java @@ -1001,6 +1001,19 @@ public abstract class BaseLoadBalancer implements LoadBalancer { return tables != null && tables.length > 0; } + public static boolean userTablesOnMaster(Configuration conf) { +String[] tables = getTablesOnMaster(conf); +if (tables == null || tables.length == 0) { + return false; +} +for (String tn:tables) { + if (!tn.startsWith("hbase:")) { +return true; + } +} +return false; + } + @Override public void setConf(Configuration conf) { setSlop(conf); http://git-wip-us.apache.org/repos/asf/hbase/blob/59c5900f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -- 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 2fe9200..0c6ee83 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -106,6 +106,7 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.RegionState.State; import org.apache.hadoop.hbase.master.TableLockManager; +import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; import org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; @@ -446,7 +447,7 @@ public class HRegionServer extends HasThread implements private RegionServerCoprocessorHost rsHost; private RegionServerProcedureManagerHost rspmHost; - + private RegionServerQuotaManager rsQuotaManager; // Table level lock manager for locking for region operations @@ -874,7 +875,7 @@ public class HRegionServer extends HasThread implements // Setup the Quota Manager rsQuotaManager = new RegionServerQuotaManager(this); - + // Setup RPC client for master communication rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress( rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics()); @@ -943,7 +944,7 @@ public class HRegionServer extends HasThread implements // since the server is ready to run rspmHost.start(); } - + // Start the Quota Manager if (this.rsQuotaManager != null) { rsQuotaManager.start(getRpcServer().getScheduler()); @@ -1037,7 +1038,7 @@ public class HRegionServer extends HasThread implements if (rsQuotaManager != null) { rsQuotaManager.stop(); } - + // Stop the snapshot and other procedure handlers, forcefully killing all running 
tasks if (rspmHost != null) { rspmHost.stop(this.abortRequested || this.killed); @@ -2598,7 +2599,7 @@ public class HRegionServer extends HasThread implements public ChoreService getChoreService() { return choreService; } - + @Override public RegionServerQuotaManager getRegionServerQuotaManager() { return rsQuotaManager; @@ -2620,6 +2621,11 @@ public class HRegionServer extends
hbase git commit: HBASE-16144 Revert - test failure in TestMultiSlaveReplication#testMultiSlaveReplication seems to be related
Repository: hbase Updated Branches: refs/heads/master 2eef33930 -> 20a99b4c0 HBASE-16144 Revert - test failure in TestMultiSlaveReplication#testMultiSlaveReplication seems to be related Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/20a99b4c Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/20a99b4c Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/20a99b4c Branch: refs/heads/master Commit: 20a99b4c06ecb77c29c3ff173052a00174b9af8c Parents: 2eef339 Author: tedyuAuthored: Tue Jul 5 13:19:33 2016 -0700 Committer: tedyu Committed: Tue Jul 5 13:19:33 2016 -0700 -- .../hbase/replication/ReplicationFactory.java | 4 +- .../replication/ReplicationQueuesZKImpl.java| 13 +-- .../org/apache/hadoop/hbase/master/HMaster.java | 16 --- .../cleaner/ReplicationZKLockCleanerChore.java | 112 --- .../replication/TestMultiSlaveReplication.java | 42 --- 5 files changed, 2 insertions(+), 185 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/20a99b4c/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java index bc7a4ce..b84641c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java @@ -31,12 +31,10 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @InterfaceAudience.Private public class ReplicationFactory { - public static final Class defaultReplicationQueueClass = ReplicationQueuesZKImpl.class; - public static ReplicationQueues getReplicationQueues(ReplicationQueuesArguments args) throws Exception { Class classToBuild = args.getConf().getClass("hbase.region.replica." 
+ -"replication.replicationQueues.class", defaultReplicationQueueClass); +"replication.replicationQueues.class", ReplicationQueuesZKImpl.class); return (ReplicationQueues) ConstructorUtils.invokeConstructor(classToBuild, args); } http://git-wip-us.apache.org/repos/asf/hbase/blob/20a99b4c/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java index 655aaae..baea74f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -239,8 +238,7 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R * @param znode the server names of the other server * @return true if the lock was acquired, false in every other cases */ - @VisibleForTesting - public boolean lockOtherRS(String znode) { + private boolean lockOtherRS(String znode) { try { String parent = ZKUtil.joinZNode(this.queuesZNode, znode); if (parent.equals(this.myQueuesZnode)) { @@ -267,15 +265,6 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R return true; } - public String getLockZNode(String znode) { -return this.queuesZNode + "/" + znode + "/" + RS_LOCK_ZNODE; - } - - @VisibleForTesting - public boolean checkLockExists(String znode) throws KeeperException { -return ZKUtil.checkExists(zookeeper, getLockZNode(znode)) >= 0; - } - /** * Delete all the replication queues for a given region 
server. * @param regionserverZnode The znode of the region server to delete. http://git-wip-us.apache.org/repos/asf/hbase/blob/20a99b4c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 87daa16..a18a51f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++
hbase git commit: HBASE-16177 In dev mode thrift server can't be run
Repository: hbase Updated Branches: refs/heads/branch-1 8efc6148b -> 1318e84e1 HBASE-16177 In dev mode thrift server can't be run Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1318e84e Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1318e84e Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1318e84e Branch: refs/heads/branch-1 Commit: 1318e84e14112a524935f50a380b9a9da29385fd Parents: 8efc614 Author: Elliott ClarkAuthored: Tue Jul 5 11:55:53 2016 -0700 Committer: Elliott Clark Committed: Tue Jul 5 12:15:31 2016 -0700 -- hbase-assembly/pom.xml | 4 1 file changed, 4 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1318e84e/hbase-assembly/pom.xml -- diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index f79ba6b..1c256bb 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -162,6 +162,10 @@ hbase-server + org.apache.hbase + hbase-thrift + + org.apache.hbase hbase-hadoop-compat
hbase git commit: HBASE-16177 In dev mode thrift server can't be run
Repository: hbase Updated Branches: refs/heads/master 318751cfd -> 2eef33930 HBASE-16177 In dev mode thrift server can't be run Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2eef3393 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2eef3393 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2eef3393 Branch: refs/heads/master Commit: 2eef33930c358fa00347376604c3fc4ee68019c1 Parents: 318751c Author: Elliott ClarkAuthored: Tue Jul 5 11:55:53 2016 -0700 Committer: Elliott Clark Committed: Tue Jul 5 12:15:21 2016 -0700 -- hbase-assembly/pom.xml | 4 1 file changed, 4 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/2eef3393/hbase-assembly/pom.xml -- diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index c21d174..04c3981 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -162,6 +162,10 @@ hbase-server + org.apache.hbase + hbase-thrift + + org.apache.hbase hbase-hadoop-compat
hbase git commit: HBASE-16177 In dev mode thrift server can't be run
Repository: hbase Updated Branches: refs/heads/branch-1.3 b3834d7f7 -> 603decdbf HBASE-16177 In dev mode thrift server can't be run Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/603decdb Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/603decdb Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/603decdb Branch: refs/heads/branch-1.3 Commit: 603decdbf7eea4f86386496d141d3548f384f409 Parents: b3834d7 Author: Elliott ClarkAuthored: Tue Jul 5 11:55:53 2016 -0700 Committer: Elliott Clark Committed: Tue Jul 5 12:15:41 2016 -0700 -- hbase-assembly/pom.xml | 4 1 file changed, 4 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/603decdb/hbase-assembly/pom.xml -- diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index 550a7c8..eca3f12 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -162,6 +162,10 @@ hbase-server + org.apache.hbase + hbase-thrift + + org.apache.hbase hbase-hadoop-compat
hbase git commit: HBASE-16091 Canary takes lot more time when there are delete markers in the table (Vishal Khandelwal)
Repository: hbase Updated Branches: refs/heads/master 29c46c483 -> 318751cfd HBASE-16091 Canary takes lot more time when there are delete markers in the table (Vishal Khandelwal) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/318751cf Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/318751cf Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/318751cf Branch: refs/heads/master Commit: 318751cfd621cfb848d90d623fdd9db1d19894ed Parents: 29c46c4 Author: Andrew PurtellAuthored: Tue Jul 5 10:11:08 2016 -0700 Committer: Andrew Purtell Committed: Tue Jul 5 11:13:48 2016 -0700 -- .../org/apache/hadoop/hbase/HConstants.java | 2 + .../org/apache/hadoop/hbase/tool/Canary.java| 63 +++- .../hadoop/hbase/tool/TestCanaryTool.java | 22 +++ 3 files changed, 72 insertions(+), 15 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/318751cf/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index fa4ce64..0bc0a07 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1244,6 +1244,8 @@ public final class HConstants { public static final String HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY = "hbase.canary.write.table.check.period"; + + public static final String HBASE_CANARY_READ_RAW_SCAN_KEY = "hbase.canary.read.raw.enabled"; /** * Configuration keys for programmatic JAAS configuration for secured ZK interaction http://git-wip-us.apache.org/repos/asf/hbase/blob/318751cf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java index 360b0f5..2e7cf7f 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java @@ -264,12 +264,15 @@ public final class Canary implements Tool { private HRegionInfo region; private Sink sink; private TaskType taskType; +private boolean rawScanEnabled; -RegionTask(Connection connection, HRegionInfo region, Sink sink, TaskType taskType) { +RegionTask(Connection connection, HRegionInfo region, Sink sink, TaskType taskType, +boolean rawScanEnabled) { this.connection = connection; this.region = region; this.sink = sink; this.taskType = taskType; + this.rawScanEnabled = rawScanEnabled; } @Override @@ -323,7 +326,11 @@ public final class Canary implements Tool { get.addFamily(column.getName()); } else { scan = new Scan(); - scan.setRaw(true); + if (LOG.isDebugEnabled()) { +LOG.debug(String.format("rawScan : %s for table: %s", rawScanEnabled, + tableDesc.getTableName())); + } + scan.setRaw(rawScanEnabled); scan.setCaching(1); scan.setCacheBlocks(false); scan.setFilter(new FirstKeyOnlyFilter()); @@ -749,6 +756,8 @@ public final class Canary implements Tool { System.err.println(" -treatFailureAsError treats read / write failure as error"); System.err.println(" -writeTableThe table used for write sniffing." 
+ " Default is hbase:canary"); +System.err.println(" -Dhbase.canary.read.raw.enabled= Use this flag to enable or disable raw scan during read canary test" ++ " Default is false and raw is not enabled during scan"); System.err .println(" -D= assigning or override the configuration params"); System.exit(USAGE_EXIT_CODE); @@ -873,6 +882,7 @@ public final class Canary implements Tool { private float regionsLowerLimit; private float regionsUpperLimit; private int checkPeriod; +private boolean rawScanEnabled; public RegionMonitor(Connection connection, String[] monitorTargets, boolean useRegExp, Sink sink, ExecutorService executor, boolean writeSniffing, TableName writeTableName, @@ -890,6 +900,7 @@ public final class Canary implements Tool { this.checkPeriod = conf.getInt(HConstants.HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY, DEFAULT_WRITE_TABLE_CHECK_PERIOD); + this.rawScanEnabled = conf.getBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, false); } @Override
[2/2] hbase git commit: HBASE-16091 Canary takes lot more time when there are delete markers in the table (Vishal Khandelwal)
HBASE-16091 Canary takes lot more time when there are delete markers in the table (Vishal Khandelwal) Conflicts: hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java Amending-Author: Andrew PurtellProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e3ef8b69 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e3ef8b69 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e3ef8b69 Branch: refs/heads/0.98 Commit: e3ef8b69bf6834b8a1b7e33aee53792e8ef1f7cb Parents: 47c1960 Author: Andrew Purtell Authored: Tue Jul 5 10:11:08 2016 -0700 Committer: Andrew Purtell Committed: Tue Jul 5 11:10:26 2016 -0700 -- .../org/apache/hadoop/hbase/HConstants.java | 4 +- .../org/apache/hadoop/hbase/tool/Canary.java| 51 +++- .../hadoop/hbase/tool/TestCanaryTool.java | 22 + 3 files changed, 63 insertions(+), 14 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/e3ef8b69/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index f046784..466c26f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1135,7 +1135,9 @@ public final class HConstants { public static final String HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY = "hbase.canary.write.table.check.period"; - + + public static final String HBASE_CANARY_READ_RAW_SCAN_KEY = "hbase.canary.read.raw.enabled"; + /** * Config keys for programmatic JAAS config for secured ZK interaction */ http://git-wip-us.apache.org/repos/asf/hbase/blob/e3ef8b69/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java -- diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java index 4f2e5fe..51f162f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java @@ -264,12 +264,15 @@ public final class Canary implements Tool { private HRegionInfo region; private Sink sink; private TaskType taskType; +private boolean rawScanEnabled; -RegionTask(HConnection connection, HRegionInfo region, Sink sink, TaskType taskType) { +RegionTask(HConnection connection, HRegionInfo region, Sink sink, TaskType taskType, +boolean rawScanEnabled) { this.connection = connection; this.region = region; this.sink = sink; this.taskType = taskType; + this.rawScanEnabled = rawScanEnabled; } @Override @@ -323,6 +326,11 @@ public final class Canary implements Tool { get.addFamily(column.getName()); } else { scan = new Scan(); + if (LOG.isDebugEnabled()) { +LOG.debug(String.format("rawScan : %s for table: %s", rawScanEnabled, + tableDesc.getTableName())); + } + scan.setRaw(rawScanEnabled); scan.setCaching(1); scan.setCacheBlocks(false); scan.setFilter(new FirstKeyOnlyFilter()); @@ -733,6 +741,8 @@ public final class Canary implements Tool { System.err.println(" -treatFailureAsError treats read / write failure as error"); System.err.println(" -writeTableThe table used for write sniffing." 
+ " Default is hbase:canary"); +System.err.println(" -Dhbase.canary.read.raw.enabled= Use this flag to enable or disable raw scan during read canary test" ++ " Default is false and raw is not enabled during scan"); System.err .println(" -D= assigning or override the configuration params"); System.exit(USAGE_EXIT_CODE); @@ -855,6 +865,7 @@ public final class Canary implements Tool { private float regionsLowerLimit; private float regionsUpperLimit; private int checkPeriod; +private boolean rawScanEnabled; public RegionMonitor(HConnection connection, String[] monitorTargets, boolean useRegExp, Sink sink, ExecutorService executor, boolean writeSniffing, TableName writeTableName, @@ -872,6 +883,7 @@ public final class Canary implements Tool { this.checkPeriod =
[1/2] hbase git commit: HBASE-16091 Canary takes lot more time when there are delete markers in the table (Vishal Khandelwal)
Repository: hbase Updated Branches: refs/heads/0.98 47c19607c -> e3ef8b69b refs/heads/branch-1 84dd9cbcb -> 8efc6148b HBASE-16091 Canary takes lot more time when there are delete markers in the table (Vishal Khandelwal) Conflicts: hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java Amending-Author: Andrew PurtellProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8efc6148 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8efc6148 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8efc6148 Branch: refs/heads/branch-1 Commit: 8efc6148b9ccaa29d2608d1d7348d0d3c5d8158d Parents: 84dd9cb Author: Andrew Purtell Authored: Tue Jul 5 10:11:08 2016 -0700 Committer: Andrew Purtell Committed: Tue Jul 5 10:34:27 2016 -0700 -- .../org/apache/hadoop/hbase/HConstants.java | 2 + .../org/apache/hadoop/hbase/tool/Canary.java| 62 +++- .../hadoop/hbase/tool/TestCanaryTool.java | 29 +++-- 3 files changed, 73 insertions(+), 20 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/8efc6148/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index e062989..769945b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1237,6 +1237,8 @@ public final class HConstants { public static final String HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY = "hbase.canary.write.table.check.period"; + + public static final String HBASE_CANARY_READ_RAW_SCAN_KEY = "hbase.canary.read.raw.enabled"; /** * Configuration keys for programmatic JAAS configuration for secured ZK interaction http://git-wip-us.apache.org/repos/asf/hbase/blob/8efc6148/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java -- diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java index 7d37161..ca27e71 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java @@ -262,12 +262,15 @@ public final class Canary implements Tool { private HRegionInfo region; private Sink sink; private TaskType taskType; +private boolean rawScanEnabled; -RegionTask(Connection connection, HRegionInfo region, Sink sink, TaskType taskType) { +RegionTask(Connection connection, HRegionInfo region, Sink sink, TaskType taskType, +boolean rawScanEnabled) { this.connection = connection; this.region = region; this.sink = sink; this.taskType = taskType; + this.rawScanEnabled = rawScanEnabled; } @Override @@ -321,6 +324,11 @@ public final class Canary implements Tool { get.addFamily(column.getName()); } else { scan = new Scan(); + if (LOG.isDebugEnabled()) { +LOG.debug(String.format("rawScan : %s for table: %s", rawScanEnabled, + tableDesc.getTableName())); + } + scan.setRaw(rawScanEnabled); scan.setCaching(1); scan.setCacheBlocks(false); scan.setFilter(new FirstKeyOnlyFilter()); @@ -746,6 +754,8 @@ public final class Canary implements Tool { System.err.println(" -treatFailureAsError treats read / write failure as error"); System.err.println(" -writeTableThe table used for write sniffing." 
+ " Default is hbase:canary"); +System.err.println(" -Dhbase.canary.read.raw.enabled= Use this flag to enable or disable raw scan during read canary test" ++ " Default is false and raw is not enabled during scan"); System.err .println(" -D= assigning or override the configuration params"); System.exit(USAGE_EXIT_CODE); @@ -870,6 +880,7 @@ public final class Canary implements Tool { private float regionsLowerLimit; private float regionsUpperLimit; private int checkPeriod; +private boolean rawScanEnabled; public RegionMonitor(Connection connection, String[] monitorTargets, boolean useRegExp, Sink sink, ExecutorService executor, boolean writeSniffing, TableName writeTableName, @@ -887,6 +898,7 @@ public final class Canary implements Tool { this.checkPeriod = conf.getInt(HConstants.HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY,
hbase git commit: HBASE-15985 clarify promises about edits from replication in ref guide
Repository: hbase Updated Branches: refs/heads/master d016338e4 -> 29c46c483 HBASE-15985 clarify promises about edits from replication in ref guide Signed-off-by: Andrew PurtellProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/29c46c48 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/29c46c48 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/29c46c48 Branch: refs/heads/master Commit: 29c46c4834a3f96e9fca33cb16bc7f3748fcd60c Parents: d016338 Author: Sean Busbey Authored: Tue Jun 7 17:16:55 2016 -0500 Committer: Sean Busbey Committed: Tue Jul 5 13:03:18 2016 -0500 -- src/main/asciidoc/_chapters/ops_mgt.adoc | 8 1 file changed, 8 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/29c46c48/src/main/asciidoc/_chapters/ops_mgt.adoc -- diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc b/src/main/asciidoc/_chapters/ops_mgt.adoc index 13449c1..6e84237 100644 --- a/src/main/asciidoc/_chapters/ops_mgt.adoc +++ b/src/main/asciidoc/_chapters/ops_mgt.adoc @@ -1320,6 +1320,14 @@ The master cluster relies on randomization to attempt to balance the stream of r It is expected that the slave cluster has storage capacity to hold the replicated data, as well as any data it is responsible for ingesting. If a slave cluster does run out of room, or is inaccessible for other reasons, it throws an error and the master retains the WAL and retries the replication at intervals. +.Consistency Across Replicated Clusters +[WARNING] + +How your application builds on top of the HBase API matters when replication is in play. HBase's replication system provides at-least-once delivery of client edits for an enabled column family to each configured destination cluster. In the event of failure to reach a given destination, the replication system will retry sending edits in a way that might repeat a given message. Further more, there is not a guaranteed order of delivery for client edits. 
In the event of a RegionServer failing, recovery of the replication queue happens independent of recovery of the individual regions that server was previously handling. This means that it is possible for the not-yet-replicated edits to be serviced by a RegionServer that is currently slower to replicate than the one that handles edits from after the failure. + +The combination of these two properties (at-least-once delivery and the lack of message ordering) means that some destination clusters may end up in a different state if your application makes use of operations that are not idempotent, e.g. Increments. + + .Terminology Changes [NOTE]
hbase git commit: HBASE-15945 Patch for Cell
Repository: hbase Updated Branches: refs/heads/HBASE-14850 6c0216034 -> ad276ef32 HBASE-15945 Patch for Cell This patch consists of Cell implementation without additional interfaces. Signed-off-by: Elliott ClarkProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ad276ef3 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ad276ef3 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ad276ef3 Branch: refs/heads/HBASE-14850 Commit: ad276ef3225ee2058dc2fec7b866b3eaa4d537cb Parents: 6c02160 Author: Sudeep Sunthankar Authored: Mon Jul 4 21:02:25 2016 +1000 Committer: Elliott Clark Committed: Tue Jul 5 10:43:41 2016 -0700 -- hbase-native-client/core/cell-test.cc | 175 + hbase-native-client/core/cell.cc | 77 + hbase-native-client/core/cell.h | 60 ++ 3 files changed, 312 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/ad276ef3/hbase-native-client/core/cell-test.cc -- diff --git a/hbase-native-client/core/cell-test.cc b/hbase-native-client/core/cell-test.cc new file mode 100644 index 000..cbe50eb --- /dev/null +++ b/hbase-native-client/core/cell-test.cc @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +#include "core/cell.h" + +#include +#include +#include + +using namespace hbase; +TEST(CellTest, CellFailureTest) { + CellType cell_type = CellType::PUT; + std::string row = "row"; + std::string family = "family"; + std::string column = "column"; + std::string value = "value"; + long timestamp = std::numeric_limits::max(); + std::string tags = ""; + std::unique_ptr cell( + new Cell(row, family, column, timestamp, value, cell_type)); + if (cell.get()) { +EXPECT_NE("row-value", cell.get()->Row()); +EXPECT_NE("family-value", cell.get()->Family()); +EXPECT_NE("column-value", cell.get()->Qualifier()); +EXPECT_NE("value-value", cell.get()->Value()); +EXPECT_NE(8975431260, cell.get()->Timestamp()); +EXPECT_NE(CellType::MAXIMUM, cell.get()->Type()); + } +} + +TEST(CellTest, CellSuceessTest) { + std::string row = "row-value"; + std::string family = "family-value"; + std::string column = "column-value"; + std::string value = "value-value"; + long timestamp = std::numeric_limits::max(); + CellType cell_type = CellType::PUT; + const std::unique_ptr cell( + new Cell(row, family, column, timestamp, value, cell_type)); + if (cell.get()) { +EXPECT_EQ(row, cell.get()->Row()); +EXPECT_EQ(family, cell.get()->Family()); +EXPECT_EQ(column, cell.get()->Qualifier()); +EXPECT_EQ(value, cell.get()->Value()); +EXPECT_EQ(timestamp, cell.get()->Timestamp()); +EXPECT_EQ(cell_type, cell.get()->Type()); + } +} + +TEST(CellTest, MultipleCellsTest) { + std::vector cells; + for (int i = 0; i < 5; i++) { +std::string row = "row-value"; +std::string family = "family-value"; +std::string column = "column-value"; +std::string value = "value-value"; +long timestamp = std::numeric_limits::max(); +row += std::to_string(i); +value += std::to_string(i); +CellType cell_type = CellType::PUT; +const Cell *cell = new Cell(row, family, column, timestamp, value, +cell_type); +cells.push_back(cell); + } + int i = 0; + for (const auto cell : cells) { +std::string row = "row-value"; +std::string value = 
"value-value"; +row += std::to_string(i); +value += std::to_string(i); +EXPECT_EQ(row, cell->Row()); +EXPECT_EQ("family-value", cell->Family()); +EXPECT_EQ("column-value", cell->Qualifier()); +EXPECT_EQ(value, cell->Value()); +EXPECT_EQ(std::numeric_limits::max(), cell->Timestamp()); +EXPECT_EQ(CellType::PUT, cell->Type()); +i += 1; + } + for (const auto cell : cells) { +delete cell; + } + cells.clear(); +} + +TEST(CellTest, CellRowTest) {
hbase git commit: HBASE-16157 The incorrect block cache count and size are caused by removing duplicate block key in the LruBlockCache (ChiaPing Tsai)
Repository: hbase Updated Branches: refs/heads/master b5cdb38dc -> d016338e4 HBASE-16157 The incorrect block cache count and size are caused by removing duplicate block key in the LruBlockCache (ChiaPing Tsai) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d016338e Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d016338e Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d016338e Branch: refs/heads/master Commit: d016338e45dca183fc05f254e3e1260d04511269 Parents: b5cdb38 Author: tedyuAuthored: Tue Jul 5 10:21:27 2016 -0700 Committer: tedyu Committed: Tue Jul 5 10:21:27 2016 -0700 -- .../hadoop/hbase/io/hfile/LruBlockCache.java| 19 ++-- .../hbase/io/hfile/TestLruBlockCache.java | 48 2 files changed, 63 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/d016338e/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index c380318..41b46f2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -473,8 +473,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { public boolean evictBlock(BlockCacheKey cacheKey) { LruCachedBlock cb = map.get(cacheKey); if (cb == null) return false; -evictBlock(cb, false); -return true; +return evictBlock(cb, false) > 0; } /** @@ -511,7 +510,10 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { * @return the heap size of evicted block */ protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) { -map.remove(block.getCacheKey()); +boolean found = map.remove(block.getCacheKey()) != null; +if (!found) { + return 0; +} updateSizeMetrics(block, 
true); long val = elements.decrementAndGet(); if (LOG.isTraceEnabled()) { @@ -543,6 +545,16 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { } } + @VisibleForTesting + boolean isEvictionInProgress() { +return evictionInProgress; + } + + @VisibleForTesting + long getOverhead() { +return overhead; + } + /** * Eviction method. */ @@ -650,7 +662,6 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { remainingBuckets--; } } - if (LOG.isTraceEnabled()) { long single = bucketSingle.totalSize(); long multi = bucketMulti.totalSize(); http://git-wip-us.apache.org/repos/asf/hbase/blob/d016338e/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java index d7f9aba..0d8a3bb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java @@ -25,6 +25,10 @@ import static org.junit.Assert.assertTrue; import java.nio.ByteBuffer; import java.util.Random; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -44,7 +48,46 @@ import org.junit.experimental.categories.Category; @Category({IOTests.class, SmallTests.class}) public class TestLruBlockCache { + @Test + public void testCacheEvictionThreadSafe() throws Exception { +long maxSize = 10; +int numBlocks = 9; +int testRuns = 10; +final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); +assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); +final LruBlockCache cache = new 
LruBlockCache(maxSize, blockSize); +EvictionThread evictionThread = cache.getEvictionThread(); +assertTrue(evictionThread != null); +while (!evictionThread.isEnteringRun()) { + Thread.sleep(1); +} +final String hfileName = "hfile"; +int threads = 10; +final int blocksPerThread = 5 * numBlocks; +for (int run = 0; run != testRuns; ++run) { + final AtomicInteger blockCount =
hbase git commit: Revert "HBASE-15650 Remove TimeRangeTracker as point of contention when many threads reading a StoreFile"
Repository: hbase Updated Branches: refs/heads/0.98 f3002bf2f -> 47c19607c Revert "HBASE-15650 Remove TimeRangeTracker as point of contention when many threads reading a StoreFile" This reverts commit 017ab8d7f1e9f8acd9bfeb5cbd24e68f418ee4f1. Investigation on HBASE-16074 (ITBLL fails, reports lost big or tiny families) implicates the HBASE-15650 optimization as cause. Revert, at least for now, out of due concern. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/47c19607 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/47c19607 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/47c19607 Branch: refs/heads/0.98 Commit: 47c19607c2c88c44f226940a2357087294fe70a5 Parents: f3002bf Author: Andrew PurtellAuthored: Tue Jul 5 09:53:16 2016 -0700 Committer: Andrew Purtell Committed: Tue Jul 5 09:53:16 2016 -0700 -- .../org/apache/hadoop/hbase/io/TimeRange.java | 37 +++--- .../hbase/io/hfile/HFilePrettyPrinter.java | 3 +- .../hadoop/hbase/regionserver/MemStore.java | 5 +- .../hadoop/hbase/regionserver/StoreFile.java| 59 +--- .../hbase/regionserver/StoreFileScanner.java| 2 +- .../hbase/regionserver/TimeRangeTracker.java| 74 .../hbase/mapreduce/TestHFileOutputFormat.java | 7 +- .../hbase/mapreduce/TestHFileOutputFormat2.java | 7 +- .../hbase/regionserver/MockStoreFile.java | 13 ++-- .../regionserver/TestTimeRangeTracker.java | 2 +- 10 files changed, 80 insertions(+), 129 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/47c19607/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java index 95ec6b9..b2c3ebe 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java @@ -36,13 +36,11 @@ import 
org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Stable public class TimeRange { - static final long INITIAL_MIN_TIMESTAMP = 0L; - private static final long MIN_TIME = INITIAL_MIN_TIMESTAMP; - static final long INITIAL_MAX_TIMESTAMP = Long.MAX_VALUE; - static final long MAX_TIME = INITIAL_MAX_TIMESTAMP; + private static final long MIN_TIME = 0L; + private static final long MAX_TIME = Long.MAX_VALUE; private long minStamp = MIN_TIME; private long maxStamp = MAX_TIME; - private final boolean allTime; + private boolean allTime = false; /** * Default constructor. @@ -58,7 +56,9 @@ public class TimeRange { */ public TimeRange(long minStamp) { this.minStamp = minStamp; -this.allTime = this.minStamp == MIN_TIME; +if (this.minStamp == MIN_TIME){ + this.allTime = true; +} } /** @@ -67,7 +67,6 @@ public class TimeRange { */ public TimeRange(byte [] minStamp) { this.minStamp = Bytes.toLong(minStamp); - this.allTime = false; } /** @@ -82,12 +81,14 @@ public class TimeRange { throw new IllegalArgumentException("Timestamp cannot be negative. minStamp:" + minStamp + ", maxStamp" + maxStamp); } -if (maxStamp < minStamp) { +if(maxStamp < minStamp) { throw new IOException("maxStamp is smaller than minStamp"); } this.minStamp = minStamp; this.maxStamp = maxStamp; -this.allTime = this.minStamp == MIN_TIME && this.maxStamp == MAX_TIME; +if (this.minStamp == MIN_TIME && this.maxStamp == MAX_TIME){ + this.allTime = true; +} } /** @@ -133,27 +134,11 @@ public class TimeRange { * @return true if within TimeRange, false if not */ public boolean withinTimeRange(byte [] bytes, int offset) { - if (allTime) { - return true; - } + if(allTime) return true; return withinTimeRange(Bytes.toLong(bytes, offset)); } /** - * Check if the range has any overlap with TimeRange - * @param tr TimeRange - * @return True if there is overlap, false otherwise - */ -// This method came from TimeRangeTracker. 
We used to go there for this function but better -// to come here to the immutable, unsynchronized datastructure at read time. - public boolean includesTimeRange(final TimeRange tr) { -if (this.allTime) { - return true; -} -return getMin() < tr.getMax() && getMax() >= tr.getMin(); - } - - /** * Check if the specified timestamp is within this TimeRange. * * Returns true if within interval [minStamp, maxStamp), false
hbase git commit: HBASE-16144 Replication queue's lock will live forever if RS acquiring the lock has died prematurely (Phil Yang)
Repository: hbase Updated Branches: refs/heads/master 7f44dfd85 -> b5cdb38dc HBASE-16144 Replication queue's lock will live forever if RS acquiring the lock has died prematurely (Phil Yang) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b5cdb38d Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b5cdb38d Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b5cdb38d Branch: refs/heads/master Commit: b5cdb38dc71a5b53a41879a945f9d596c07a0630 Parents: 7f44dfd Author: tedyuAuthored: Tue Jul 5 07:16:12 2016 -0700 Committer: tedyu Committed: Tue Jul 5 07:16:12 2016 -0700 -- .../hbase/replication/ReplicationFactory.java | 4 +- .../replication/ReplicationQueuesZKImpl.java| 13 ++- .../org/apache/hadoop/hbase/master/HMaster.java | 16 +++ .../cleaner/ReplicationZKLockCleanerChore.java | 112 +++ .../replication/TestMultiSlaveReplication.java | 42 +++ 5 files changed, 185 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/b5cdb38d/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java index b84641c..bc7a4ce 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java @@ -31,10 +31,12 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @InterfaceAudience.Private public class ReplicationFactory { + public static final Class defaultReplicationQueueClass = ReplicationQueuesZKImpl.class; + public static ReplicationQueues getReplicationQueues(ReplicationQueuesArguments args) throws Exception { Class classToBuild = args.getConf().getClass("hbase.region.replica." 
+ -"replication.replicationQueues.class", ReplicationQueuesZKImpl.class); +"replication.replicationQueues.class", defaultReplicationQueueClass); return (ReplicationQueues) ConstructorUtils.invokeConstructor(classToBuild, args); } http://git-wip-us.apache.org/repos/asf/hbase/blob/b5cdb38d/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java index baea74f..655aaae 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -238,7 +239,8 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R * @param znode the server names of the other server * @return true if the lock was acquired, false in every other cases */ - private boolean lockOtherRS(String znode) { + @VisibleForTesting + public boolean lockOtherRS(String znode) { try { String parent = ZKUtil.joinZNode(this.queuesZNode, znode); if (parent.equals(this.myQueuesZnode)) { @@ -265,6 +267,15 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R return true; } + public String getLockZNode(String znode) { +return this.queuesZNode + "/" + znode + "/" + RS_LOCK_ZNODE; + } + + @VisibleForTesting + public boolean checkLockExists(String znode) throws KeeperException { +return ZKUtil.checkExists(zookeeper, getLockZNode(znode)) >= 0; + } + /** * Delete all the replication queues for a given region 
server. * @param regionserverZnode The znode of the region server to delete. http://git-wip-us.apache.org/repos/asf/hbase/blob/b5cdb38d/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index a18a51f..87daa16 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++