hbase git commit: HBASE-19589 New regions should always be added with state CLOSED (followup of HBASE-19530)
Repository: hbase Updated Branches: refs/heads/branch-2 c89cfd340 -> 11ea19a10 HBASE-19589 New regions should always be added with state CLOSED (followup of HBASE-19530) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/11ea19a1 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/11ea19a1 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/11ea19a1 Branch: refs/heads/branch-2 Commit: 11ea19a1015a73904ebf7fcabfed763a90889ef1 Parents: c89cfd3 Author: Apekshit Sharma Authored: Thu Dec 21 17:14:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 22:58:28 2017 -0800 -- .../apache/hadoop/hbase/MetaTableAccessor.java | 247 +-- .../procedure/CloneSnapshotProcedure.java | 2 + .../hbase/snapshot/RestoreSnapshotHelper.java | 2 +- .../hadoop/hbase/HBaseTestingUtility.java | 2 +- .../hadoop/hbase/util/BaseTestHBaseFsck.java| 17 -- .../util/hbck/OfflineMetaRebuildTestCore.java | 2 +- 6 files changed, 73 insertions(+), 199 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/11ea19a1/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index 4f14192..f75472b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -1314,16 +1314,14 @@ public class MetaTableAccessor { /** * Generates and returns a Put containing the region into for the catalog table */ - public static Put makePutFromRegionInfo(RegionInfo regionInfo) -throws IOException { + public static Put makePutFromRegionInfo(RegionInfo regionInfo) throws IOException { return makePutFromRegionInfo(regionInfo, EnvironmentEdgeManager.currentTime()); } /** * Generates and returns a Put containing the region into for the catalog table */ - public static Put makePutFromRegionInfo(RegionInfo regionInfo, long ts) -throws IOException { + public static Put makePutFromRegionInfo(RegionInfo regionInfo, long ts) throws IOException { Put put = new Put(regionInfo.getRegionName(), ts); addRegionInfo(put, regionInfo); return put; @@ -1448,9 +1446,7 @@ public class MetaTableAccessor { */ private static void put(final Table t, final List puts) throws IOException { try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(puts)); - } + debugLogMutations(puts); t.put(puts); } finally { t.close(); @@ -1467,9 +1463,7 @@ public class MetaTableAccessor { throws IOException { Table t = getMetaHTable(connection); try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(ps)); - } + debugLogMutations(ps); t.put(ps); } finally { t.close(); @@ -1499,9 +1493,7 @@ public class MetaTableAccessor { throws IOException { Table t = getMetaHTable(connection); try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(deletes)); - } + debugLogMutations(deletes); t.delete(deletes); } finally { t.close(); @@ -1546,9 +1538,7 @@ public class MetaTableAccessor { throws IOException { Table t = getMetaHTable(connection); try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(mutations)); - } + debugLogMutations(mutations); t.batch(mutations, null); } catch (InterruptedException e) { InterruptedIOException ie = new InterruptedIOException(e.getMessage()); @@ -1559,33 +1549,7 @@ public class MetaTableAccessor { } } - /** - * Adds a hbase:meta 
row for the specified new region. - * @param connection connection we're using - * @param regionInfo region information - * @throws IOException if problem connecting or updating meta - */ - @VisibleForTesting - static void addRegionToMeta(Connection connection, - RegionInfo regionInfo) -throws IOException { -putToMetaTable(connection, makePutFromRegionInfo(regionInfo)); -LOG.info("Added " + regionInfo.getRegionNameAsString()); - } - - /** - * Adds a hbase:meta row for the specified new region to the given catalog table. The - * Table is not flushed or closed. - * @param meta the Table for META - * @param regionInfo region information - * @throws IOException if problem connecting or updating meta - */ - public static void addRegionToMeta(Table meta, RegionInfo regionInfo) throws IOExcept
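Note on the refactor above: the repeated "if (METALOG.isDebugEnabled()) { METALOG.debug(mutationsToString(...)); }" blocks are collapsed into a single debugLogMutations(...) call. The helper's body is not visible in the truncated hunk, so the following is only a minimal sketch of what it could look like, reusing the existing mutationsToString(...) utility; the exact signature is an assumption.

    // Hypothetical sketch only -- the real helper added by HBASE-19589 is not shown above.
    private static void debugLogMutations(List<? extends Mutation> mutations) throws IOException {
      if (!METALOG.isDebugEnabled()) {
        return; // skip building the (potentially large) string when debug logging is off
      }
      METALOG.debug(mutationsToString(mutations));
    }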
hbase git commit: HBASE-19589 New regions should always be added with state CLOSED (followup of HBASE-19530)
Repository: hbase Updated Branches: refs/heads/master 070c2ee7d -> 8ec0aa0d7 HBASE-19589 New regions should always be added with state CLOSED (followup of HBASE-19530) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8ec0aa0d Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8ec0aa0d Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8ec0aa0d Branch: refs/heads/master Commit: 8ec0aa0d709ced78331dd61d28c79f3433198227 Parents: 070c2ee Author: Apekshit Sharma Authored: Thu Dec 21 17:14:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 22:57:53 2017 -0800 -- .../apache/hadoop/hbase/MetaTableAccessor.java | 247 +-- .../procedure/CloneSnapshotProcedure.java | 2 + .../hbase/snapshot/RestoreSnapshotHelper.java | 2 +- .../hadoop/hbase/HBaseTestingUtility.java | 2 +- .../hadoop/hbase/util/BaseTestHBaseFsck.java| 17 -- .../util/hbck/OfflineMetaRebuildTestCore.java | 2 +- 6 files changed, 73 insertions(+), 199 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/8ec0aa0d/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index 4f14192..f75472b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -1314,16 +1314,14 @@ public class MetaTableAccessor { /** * Generates and returns a Put containing the region into for the catalog table */ - public static Put makePutFromRegionInfo(RegionInfo regionInfo) -throws IOException { + public static Put makePutFromRegionInfo(RegionInfo regionInfo) throws IOException { return makePutFromRegionInfo(regionInfo, EnvironmentEdgeManager.currentTime()); } /** * Generates and returns a Put containing the region into for the catalog table */ - public static Put makePutFromRegionInfo(RegionInfo regionInfo, long ts) -throws IOException { + public static Put makePutFromRegionInfo(RegionInfo regionInfo, long ts) throws IOException { Put put = new Put(regionInfo.getRegionName(), ts); addRegionInfo(put, regionInfo); return put; @@ -1448,9 +1446,7 @@ public class MetaTableAccessor { */ private static void put(final Table t, final List puts) throws IOException { try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(puts)); - } + debugLogMutations(puts); t.put(puts); } finally { t.close(); @@ -1467,9 +1463,7 @@ public class MetaTableAccessor { throws IOException { Table t = getMetaHTable(connection); try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(ps)); - } + debugLogMutations(ps); t.put(ps); } finally { t.close(); @@ -1499,9 +1493,7 @@ public class MetaTableAccessor { throws IOException { Table t = getMetaHTable(connection); try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(deletes)); - } + debugLogMutations(deletes); t.delete(deletes); } finally { t.close(); @@ -1546,9 +1538,7 @@ public class MetaTableAccessor { throws IOException { Table t = getMetaHTable(connection); try { - if (METALOG.isDebugEnabled()) { -METALOG.debug(mutationsToString(mutations)); - } + debugLogMutations(mutations); t.batch(mutations, null); } catch (InterruptedException e) { InterruptedIOException ie = new InterruptedIOException(e.getMessage()); @@ -1559,33 +1549,7 @@ public class MetaTableAccessor { } } - /** - * Adds a hbase:meta row 
for the specified new region. - * @param connection connection we're using - * @param regionInfo region information - * @throws IOException if problem connecting or updating meta - */ - @VisibleForTesting - static void addRegionToMeta(Connection connection, - RegionInfo regionInfo) -throws IOException { -putToMetaTable(connection, makePutFromRegionInfo(regionInfo)); -LOG.info("Added " + regionInfo.getRegionNameAsString()); - } - - /** - * Adds a hbase:meta row for the specified new region to the given catalog table. The - * Table is not flushed or closed. - * @param meta the Table for META - * @param regionInfo region information - * @throws IOException if problem connecting or updating meta - */ - public static void addRegionToMeta(Table meta, RegionInfo regionInfo) throws IOException
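The point of this change is that newly created regions land in hbase:meta with an explicit CLOSED state rather than no state at all. The truncated hunk above does not show where the state column is written, so the fragment below is only an illustration of how a catalog Put could carry that state, assuming the usual info family / "state" qualifier convention of hbase:meta; it is not the commit's actual helper.

    // Illustration only: attach an explicit region state to the catalog Put.
    // The "state" qualifier and the use of RegionState.State.CLOSED are assumptions here;
    // regionInfo is assumed to be in scope.
    Put put = MetaTableAccessor.makePutFromRegionInfo(regionInfo, EnvironmentEdgeManager.currentTime());
    put.addColumn(HConstants.CATALOG_FAMILY, Bytes.toBytes("state"),
        Bytes.toBytes(RegionState.State.CLOSED.name()));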
hbase git commit: HBASE-19525 RS side changes for moving peer modification from zk watcher to procedure
Repository: hbase Updated Branches: refs/heads/HBASE-19397 049904fef -> 61b974789 HBASE-19525 RS side changes for moving peer modification from zk watcher to procedure Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/61b97478 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/61b97478 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/61b97478 Branch: refs/heads/HBASE-19397 Commit: 61b974789d06ddf97d5ae43fdd84e958d0a152f9 Parents: 049904f Author: huzheng Authored: Wed Dec 20 10:47:18 2017 +0800 Committer: huzheng Committed: Fri Dec 22 14:52:41 2017 +0800 -- .../hadoop/hbase/protobuf/ProtobufUtil.java | 11 +- .../hbase/shaded/protobuf/ProtobufUtil.java | 13 +- .../hbase/replication/ReplicationListener.java | 14 -- .../hbase/replication/ReplicationPeer.java | 28 ++- .../replication/ReplicationPeerZKImpl.java | 180 --- .../replication/ReplicationPeersZKImpl.java | 19 +- .../replication/ReplicationTrackerZKImpl.java | 73 +- .../regionserver/ReplicationSourceService.java | 9 +- .../handler/RSProcedureHandler.java | 3 + .../replication/BaseReplicationEndpoint.java| 2 +- .../regionserver/PeerProcedureHandler.java | 38 .../regionserver/PeerProcedureHandlerImpl.java | 81 +++ .../regionserver/RefreshPeerCallable.java | 39 +++- .../replication/regionserver/Replication.java | 10 + .../regionserver/ReplicationSource.java | 9 +- .../regionserver/ReplicationSourceManager.java | 37 ++- .../replication/TestReplicationAdmin.java | 2 +- .../TestReplicationAdminUsingProcedure.java | 226 +++ .../replication/DummyModifyPeerProcedure.java | 48 .../TestDummyModifyPeerProcedure.java | 80 --- .../TestReplicationTrackerZKImpl.java | 51 - .../TestReplicationSourceManager.java | 32 ++- ...tTableBasedReplicationSourceManagerImpl.java | 7 +- 23 files changed, 536 insertions(+), 476 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/61b97478/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 267dc7a..d5285dc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.protobuf; +import static org.apache.hadoop.hbase.protobuf.ProtobufMagic.PB_MAGIC; + import com.google.protobuf.ByteString; import com.google.protobuf.CodedInputStream; import com.google.protobuf.InvalidProtocolBufferException; @@ -199,7 +201,7 @@ public final class ProtobufUtil { * byte array that is bytes.length plus {@link ProtobufMagic#PB_MAGIC}.length. */ public static byte [] prependPBMagic(final byte [] bytes) { -return Bytes.add(ProtobufMagic.PB_MAGIC, bytes); +return Bytes.add(PB_MAGIC, bytes); } /** @@ -224,10 +226,11 @@ public final class ProtobufUtil { * @param bytes bytes to check * @throws DeserializationException if we are missing the pb magic prefix */ - public static void expectPBMagicPrefix(final byte [] bytes) throws DeserializationException { + public static void expectPBMagicPrefix(final byte[] bytes) throws DeserializationException { if (!isPBMagicPrefix(bytes)) { - throw new DeserializationException("Missing pb magic " + - Bytes.toString(ProtobufMagic.PB_MAGIC) + " prefix"); + String bytesPrefix = bytes == null ? 
"null" : Bytes.toStringBinary(bytes, 0, PB_MAGIC.length); + throw new DeserializationException( + "Missing pb magic " + Bytes.toString(PB_MAGIC) + " prefix, bytes: " + bytesPrefix); } } http://git-wip-us.apache.org/repos/asf/hbase/blob/61b97478/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index c9ea5a5..57c8a55 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.shaded.protobuf; +import static org.apache.hadoop.hbase.protobuf.ProtobufMagic.PB_MAGIC; + import java.io.Byte
hbase git commit: HBASE-15124 Document the new 'normalization' feature in refguid
Repository: hbase Updated Branches: refs/heads/master 37bf54a51 -> 070c2ee7d HBASE-15124 Document the new 'normalization' feature in refguid Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/070c2ee7 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/070c2ee7 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/070c2ee7 Branch: refs/heads/master Commit: 070c2ee7dd371157c1c242fe28b91f76ba47c5c5 Parents: 37bf54a Author: Michael Stack Authored: Thu Dec 21 22:11:12 2017 -0800 Committer: Michael Stack Committed: Thu Dec 21 22:47:25 2017 -0800 -- src/main/asciidoc/_chapters/ops_mgt.adoc | 136 ++ 1 file changed, 136 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/070c2ee7/src/main/asciidoc/_chapters/ops_mgt.adoc -- diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc b/src/main/asciidoc/_chapters/ops_mgt.adoc index 7b0f89b..f5fa456 100644 --- a/src/main/asciidoc/_chapters/ops_mgt.adoc +++ b/src/main/asciidoc/_chapters/ops_mgt.adoc @@ -2704,4 +2704,140 @@ Viewing the Master log will give you insight on rsgroup operation. If it appears stuck, restart the Master process. +[[normalizer]] +== Region Normalizer + +The Region Normalizer tries to make all Regions in a table about the same size. +It does this by finding a rough average. Any region that is larger than twice this +size is split. Any region that is much smaller is merged into an adjacent region. +It is good to run the Normalizer on occasion on a down time after the cluster has +been running a while or say after a burst of activity such as a large delete. + +(The bulk of the below detail was copied wholesale from the blog by Romil Choksi at +link:https://community.hortonworks.com/articles/54987/hbase-region-normalizer.html[HBase Region Normalizer]) + +The Region Normalizer is a feature available since HBase-1.2. It runs a set of +pre-calculated merge/split actions to resize regions that are either too +large or too small compared to the average region size for a given table. Region +Normalizer when invoked computes a normalization 'plan' for all of the tables in +HBase. System tables (such as hbase:meta, hbase:namespace, Phoenix system tables +etc) and user tables with normalization disabled are ignored while computing the +plan. For normalization enabled tables, normalization plan is carried out in +parallel across multiple tables. + +Normalizer can be enabled or disabled globally for the entire cluster using the +"normalizer_switch" command in the HBase shell. Normalization can also be +controlled on a per table basis, which is disabled by default when a table is +created. Normalization for a table can be enabled or disabled by setting the +NORMALIZATION_ENABLED table attribute to true or false. + +To check normalizer status and enable/disable normalizer +[source,bash] + +hbase(main):001:0> normalizer_enabled +true +0 row(s) in 0.4870 seconds + +hbase(main):002:0> normalizer_switch false +true +0 row(s) in 0.0640 seconds + +hbase(main):003:0> normalizer_enabled +false +0 row(s) in 0.0120 seconds + +hbase(main):004:0> normalizer_switch true +false +0 row(s) in 0.0200 seconds + +hbase(main):005:0> normalizer_enabled +true +0 row(s) in 0.0090 seconds + + +When enabled, Normalizer is invoked in the background every 5 mins (by default), +which can be configured using `hbase.normalization.period` in `hbase-site.xml`. +Normalizer can also be invoked manually/programmatically at will using HBase shell's +`normalize` command. 
HBase by default uses `SimpleRegionNormalizer`, but users can +design their own normalizer as long as they implement the RegionNormalizer Interface. +Details about the logic used by `SimpleRegionNormalizer` to compute its normalization +plan can be found link:https://hbase.apache.org/devapidocs/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.html[here]. + +The below example shows a normalization plan being computed for an user table, and +merge action being taken as a result of the normalization plan computed by SimpleRegionNormalizer. + +Consider an user table with some pre-split regions having 3 equally large regions +(about 100K rows) and 1 relatively small region (about 25K rows). Following is the +snippet from an hbase meta table scan showing each of the pre-split regions for +the user table. + + +table_p8ddpd6q5z,,1469494305548.68b9892220865cb6048 column=info:regioninfo, timestamp=1469494306375, value={ENCODED => 68b9892220865cb604809c950d1adf48, NAME => 'table_p8ddpd6q5z,,1469494305548.68b989222 09c950d1adf48. 0865cb604809c950d1adf48.', STARTKEY => '', ENDKEY => '1'} + +table_p8ddpd6q5z,1,1469494317178.867b77333bdc75a028
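The section above drives the normalizer from the shell; the same switches are also reachable from the Java client. The snippet below is a hedged sketch against the Admin API of the 1.2+ release lines (method names such as setNormalizerRunning may be renamed or deprecated on newer branches; the class name is illustrative):

    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    // Illustrative class name; mirrors the shell session shown above.
    public class NormalizerSwitchExample {
      public static void main(String[] args) throws IOException {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          System.out.println("normalizer enabled: " + admin.isNormalizerEnabled()); // shell: normalizer_enabled
          admin.setNormalizerRunning(true);                                         // shell: normalizer_switch true
          admin.normalize();                                                        // shell: normalize
        }
      }
    }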
hbase git commit: HBASE--17248 fix javadoc in SimpleRegionNormalizer
Repository: hbase Updated Branches: refs/heads/branch-2 313a65a13 -> c89cfd340 HBASE--17248 fix javadoc in SimpleRegionNormalizer Signed-off-by: Michael Stack Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c89cfd34 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c89cfd34 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c89cfd34 Branch: refs/heads/branch-2 Commit: c89cfd3406823cf05fa83464c5ddee16bf0d473f Parents: 313a65a Author: Daisuke Authored: Sat Dec 3 19:03:38 2016 +0900 Committer: Michael Stack Committed: Thu Dec 21 21:35:50 2017 -0800 -- .../master/normalizer/SimpleRegionNormalizer.java | 14 +++--- 1 file changed, 7 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/c89cfd34/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java index 767324a..38cf847 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java @@ -43,13 +43,13 @@ import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; * Logic in use: * * - * get all regions of a given table - * get avg size S of each region (by total size of store files reported in RegionLoad) - * If biggest region is bigger than S * 2, it is kindly requested to split, - *and normalization stops - * Otherwise, two smallest region R1 and its smallest neighbor R2 are kindly requested - *to merge, if R1 + R1 < S, and normalization stops - * Otherwise, no action is performed + * Get all regions of a given table + * Get avg size S of each region (by total size of store files reported in RegionLoad) + * Seek every single region one by one. If a region R0 is bigger than S * 2, it is + * kindly requested to split. Thereon evaluate the next region R1 + * Otherwise, if R0 + R1 is smaller than S, R0 and R1 are kindly requested to merge. + * Thereon evaluate the next region R2 + * Otherwise, R1 is evaluated * * * Region sizes are coarse and approximate on the order of megabytes. Additionally,
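The corrected javadoc above describes the plan computation in prose: walk the regions in order, request a split when a region exceeds twice the average size, otherwise request a merge when a region plus its next neighbor stays under the average. The sketch below restates that walk against simplified placeholder types (plain long sizes and string "plans") purely to make the control flow concrete; it is not the actual SimpleRegionNormalizer code.

    // Illustrative restatement of the documented logic; imports: java.util.ArrayList, java.util.List.
    static List<String> sketchPlans(List<Long> regionSizesMb) {
      double avg = regionSizesMb.stream().mapToLong(Long::longValue).average().orElse(0);
      List<String> plans = new ArrayList<>();
      for (int i = 0; i < regionSizesMb.size(); i++) {
        long r0 = regionSizesMb.get(i);
        if (r0 > 2 * avg) {
          plans.add("SPLIT region " + i);                        // R0 too big: request a split, then evaluate R1
        } else if (i + 1 < regionSizesMb.size() && r0 + regionSizesMb.get(i + 1) < avg) {
          plans.add("MERGE regions " + i + " and " + (i + 1));   // R0 + R1 under the average: request a merge
          i++;                                                   // R1 is consumed by the merge; continue with R2
        }                                                        // otherwise leave R0 alone and evaluate R1 next
      }
      return plans;
    }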
hbase git commit: HBASE--17248 fix javadoc in SimpleRegionNormalizer
Repository: hbase Updated Branches: refs/heads/master 5f02bf5b7 -> 37bf54a51 HBASE--17248 fix javadoc in SimpleRegionNormalizer Signed-off-by: Michael Stack Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/37bf54a5 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/37bf54a5 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/37bf54a5 Branch: refs/heads/master Commit: 37bf54a516f038a86b818d872452fb7169dd5a42 Parents: 5f02bf5 Author: Daisuke Authored: Sat Dec 3 19:03:38 2016 +0900 Committer: Michael Stack Committed: Thu Dec 21 21:35:08 2017 -0800 -- .../master/normalizer/SimpleRegionNormalizer.java | 14 +++--- 1 file changed, 7 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/37bf54a5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java index 767324a..38cf847 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java @@ -43,13 +43,13 @@ import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; * Logic in use: * * - * get all regions of a given table - * get avg size S of each region (by total size of store files reported in RegionLoad) - * If biggest region is bigger than S * 2, it is kindly requested to split, - *and normalization stops - * Otherwise, two smallest region R1 and its smallest neighbor R2 are kindly requested - *to merge, if R1 + R1 < S, and normalization stops - * Otherwise, no action is performed + * Get all regions of a given table + * Get avg size S of each region (by total size of store files reported in RegionLoad) + * Seek every single region one by one. If a region R0 is bigger than S * 2, it is + * kindly requested to split. Thereon evaluate the next region R1 + * Otherwise, if R0 + R1 is smaller than S, R0 and R1 are kindly requested to merge. + * Thereon evaluate the next region R2 + * Otherwise, R1 is evaluated * * * Region sizes are coarse and approximate on the order of megabytes. Additionally,
hbase git commit: HBASE-10092 Addendum. Move to slf4j. Few changes in bin/ scripts.
Repository: hbase Updated Branches: refs/heads/master a4272a9f0 -> 5f02bf5b7 HBASE-10092 Addendum. Move to slf4j. Few changes in bin/ scripts. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5f02bf5b Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5f02bf5b Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5f02bf5b Branch: refs/heads/master Commit: 5f02bf5b7b471e093a546ea494b39216570105e7 Parents: a4272a9 Author: Apekshit Sharma Authored: Thu Dec 21 19:20:29 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 19:22:00 2017 -0800 -- bin/draining_servers.rb | 22 ++ bin/replication/copy_tables_desc.rb | 4 ++-- 2 files changed, 4 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/5f02bf5b/bin/draining_servers.rb -- diff --git a/bin/draining_servers.rb b/bin/draining_servers.rb index 588bac4..27cc0fa 100644 --- a/bin/draining_servers.rb +++ b/bin/draining_servers.rb @@ -27,8 +27,7 @@ java_import org.apache.hadoop.hbase.HBaseConfiguration java_import org.apache.hadoop.hbase.client.ConnectionFactory java_import org.apache.hadoop.hbase.client.HBaseAdmin java_import org.apache.hadoop.hbase.zookeeper.ZKUtil -java_import org.apache.commons.logging.Log -java_import org.apache.commons.logging.LogFactory +java_import org.slf4j.LoggerFactory # Name of this script NAME = 'draining_servers'.freeze @@ -43,10 +42,6 @@ optparse = OptionParser.new do |opts| puts opts exit end - options[:debug] = false - opts.on('-d', '--debug', 'Display extra debug logging') do -options[:debug] = true - end end optparse.parse! @@ -133,21 +128,8 @@ end hostOrServers = ARGV[1..ARGV.size] -# Create a logger and disable the DEBUG-level annoying client logging -def configureLogging(options) - apacheLogger = LogFactory.getLog(NAME) - # Configure log4j to not spew so much - unless options[:debug] -logger = org.apache.log4j.Logger.getLogger('org.apache.hadoop.hbase') -logger.setLevel(org.apache.log4j.Level::WARN) -logger = org.apache.log4j.Logger.getLogger('org.apache.zookeeper') -logger.setLevel(org.apache.log4j.Level::WARN) - end - apacheLogger -end - # Create a logger and save it to ruby global -$LOG = configureLogging(options) +$LOG = LoggerFactory.getLogger(NAME) case ARGV[0] when 'add' if ARGV.length < 2 http://git-wip-us.apache.org/repos/asf/hbase/blob/5f02bf5b/bin/replication/copy_tables_desc.rb -- diff --git a/bin/replication/copy_tables_desc.rb b/bin/replication/copy_tables_desc.rb index c494765..44a24f9 100644 --- a/bin/replication/copy_tables_desc.rb +++ b/bin/replication/copy_tables_desc.rb @@ -24,7 +24,6 @@ # include Java -java_import org.apache.commons.logging.LogFactory java_import org.apache.hadoop.conf.Configuration java_import org.apache.hadoop.hbase.HBaseConfiguration java_import org.apache.hadoop.hbase.HConstants @@ -32,6 +31,7 @@ java_import org.apache.hadoop.hbase.HTableDescriptor java_import org.apache.hadoop.hbase.TableName java_import org.apache.hadoop.hbase.client.ConnectionFactory java_import org.apache.hadoop.hbase.client.HBaseAdmin +java_import org.slf4j.LoggerFactory # Name of this script NAME = 'copy_tables_desc'.freeze @@ -64,7 +64,7 @@ end usage if ARGV.size < 2 || ARGV.size > 3 -LOG = LogFactory.getLog(NAME) +LOG = LoggerFactory.getLogger(NAME) parts1 = ARGV[0].split(':')
hbase git commit: HBASE-10092 Addendum. Move to slf4j. Few changes in bin/ scripts.
Repository: hbase Updated Branches: refs/heads/branch-2 b263eb564 -> 313a65a13 HBASE-10092 Addendum. Move to slf4j. Few changes in bin/ scripts. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/313a65a1 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/313a65a1 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/313a65a1 Branch: refs/heads/branch-2 Commit: 313a65a13cd6bc3fde0ac95f9d2355174c963f08 Parents: b263eb5 Author: Apekshit Sharma Authored: Thu Dec 21 19:20:29 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 19:26:19 2017 -0800 -- bin/draining_servers.rb | 22 ++ bin/replication/copy_tables_desc.rb | 4 ++-- 2 files changed, 4 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/313a65a1/bin/draining_servers.rb -- diff --git a/bin/draining_servers.rb b/bin/draining_servers.rb index 588bac4..27cc0fa 100644 --- a/bin/draining_servers.rb +++ b/bin/draining_servers.rb @@ -27,8 +27,7 @@ java_import org.apache.hadoop.hbase.HBaseConfiguration java_import org.apache.hadoop.hbase.client.ConnectionFactory java_import org.apache.hadoop.hbase.client.HBaseAdmin java_import org.apache.hadoop.hbase.zookeeper.ZKUtil -java_import org.apache.commons.logging.Log -java_import org.apache.commons.logging.LogFactory +java_import org.slf4j.LoggerFactory # Name of this script NAME = 'draining_servers'.freeze @@ -43,10 +42,6 @@ optparse = OptionParser.new do |opts| puts opts exit end - options[:debug] = false - opts.on('-d', '--debug', 'Display extra debug logging') do -options[:debug] = true - end end optparse.parse! @@ -133,21 +128,8 @@ end hostOrServers = ARGV[1..ARGV.size] -# Create a logger and disable the DEBUG-level annoying client logging -def configureLogging(options) - apacheLogger = LogFactory.getLog(NAME) - # Configure log4j to not spew so much - unless options[:debug] -logger = org.apache.log4j.Logger.getLogger('org.apache.hadoop.hbase') -logger.setLevel(org.apache.log4j.Level::WARN) -logger = org.apache.log4j.Logger.getLogger('org.apache.zookeeper') -logger.setLevel(org.apache.log4j.Level::WARN) - end - apacheLogger -end - # Create a logger and save it to ruby global -$LOG = configureLogging(options) +$LOG = LoggerFactory.getLogger(NAME) case ARGV[0] when 'add' if ARGV.length < 2 http://git-wip-us.apache.org/repos/asf/hbase/blob/313a65a1/bin/replication/copy_tables_desc.rb -- diff --git a/bin/replication/copy_tables_desc.rb b/bin/replication/copy_tables_desc.rb index c494765..44a24f9 100644 --- a/bin/replication/copy_tables_desc.rb +++ b/bin/replication/copy_tables_desc.rb @@ -24,7 +24,6 @@ # include Java -java_import org.apache.commons.logging.LogFactory java_import org.apache.hadoop.conf.Configuration java_import org.apache.hadoop.hbase.HBaseConfiguration java_import org.apache.hadoop.hbase.HConstants @@ -32,6 +31,7 @@ java_import org.apache.hadoop.hbase.HTableDescriptor java_import org.apache.hadoop.hbase.TableName java_import org.apache.hadoop.hbase.client.ConnectionFactory java_import org.apache.hadoop.hbase.client.HBaseAdmin +java_import org.slf4j.LoggerFactory # Name of this script NAME = 'copy_tables_desc'.freeze @@ -64,7 +64,7 @@ end usage if ARGV.size < 2 || ARGV.size > 3 -LOG = LogFactory.getLog(NAME) +LOG = LoggerFactory.getLogger(NAME) parts1 = ARGV[0].split(':')
[hbase] Git Push Summary
Repository: hbase Updated Branches: refs/heads/HBASE-19571 [deleted] 1d453e7ab
[hbase] Git Push Summary
Repository: hbase Updated Branches: refs/heads/HBASE-19491 [deleted] 20b4aa971
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs.
Repository: hbase Updated Branches: refs/heads/branch-1.2 5f7d8e0e1 -> 65da4ed28 HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs. Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests. So Hadoop3 tests will run only if hadoop2 tests pass. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/65da4ed2 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/65da4ed2 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/65da4ed2 Branch: refs/heads/branch-1.2 Commit: 65da4ed28e92864f2332f0a7092c75490e239b03 Parents: 5f7d8e0 Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 17:46:30 2017 -0800 -- dev-support/Jenkinsfile| 73 ++--- dev-support/hbase-personality.sh | 8 dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 80 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/65da4ed2/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index f06923b..b01a96b 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,7 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -102,7 +102,7 @@ fi dir ("${env.TOOLS}") { sh """#!/usr/bin/env bash echo "Downloading Project personality." -curl -L -o personality.sh "${env.PROJET_PERSONALITY}" +curl -L -o personality.sh "${env.PROJECT_PERSONALITY}" """ } stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh" @@ -203,15 +203,15 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { -TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' -OUTPUT_DIR_RELATIVE = "output-jdk8" +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // This isn't strictly needed on branches that only support jdk8, but doesn't hurt // and is needed on branches that do both jdk7 and jdk8 @@ -253,7 +253,68 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // Has to be relative to WORKSPACE. reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles : 'console-report.html', -reportName : 'JDK8 Nightly Build Report' +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is part of this last yetus stage to prevent findbugs precluding hadoop3 +// tests. 
+TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop3" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' +// Activates hadoop 3.0 profile in maven runs. +HADOOP_PROFILE = '3.0' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + // Not sure how two junit test reports will work. Disabling this for now. + // junit
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs.
Repository: hbase Updated Branches: refs/heads/branch-1.1 28cc1fbd6 -> 30a565bd7 HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs. Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests. So Hadoop3 tests will run only if hadoop2 tests pass. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/30a565bd Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/30a565bd Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/30a565bd Branch: refs/heads/branch-1.1 Commit: 30a565bd7be9e10dfc8169f1ca06a99538cd13ff Parents: 28cc1fb Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 17:46:45 2017 -0800 -- dev-support/Jenkinsfile| 73 ++--- dev-support/hbase-personality.sh | 8 dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 80 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/30a565bd/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 3070a4d..40abbc7 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,7 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -102,7 +102,7 @@ fi dir ("${env.TOOLS}") { sh """#!/usr/bin/env bash echo "Downloading Project personality." -curl -L -o personality.sh "${env.PROJET_PERSONALITY}" +curl -L -o personality.sh "${env.PROJECT_PERSONALITY}" """ } stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh" @@ -203,15 +203,15 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { -TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' -OUTPUT_DIR_RELATIVE = "output-jdk8" +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // This isn't strictly needed on branches that only support jdk8, but doesn't hurt // and is needed on branches that do both jdk7 and jdk8 @@ -253,7 +253,68 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // Has to be relative to WORKSPACE. reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles : 'console-report.html', -reportName : 'JDK8 Nightly Build Report' +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is part of this last yetus stage to prevent findbugs precluding hadoop3 +// tests. 
+TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop3" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' +// Activates hadoop 3.0 profile in maven runs. +HADOOP_PROFILE = '3.0' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + // Not sure how two junit test reports will work. Disabling this for now. + // junit
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs.
Repository: hbase Updated Branches: refs/heads/branch-1.4 8f7dbfda1 -> 64b0a6585 HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs. Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests. So Hadoop3 tests will run only if hadoop2 tests pass. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/64b0a658 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/64b0a658 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/64b0a658 Branch: refs/heads/branch-1.4 Commit: 64b0a6585ee50a9124b26bb2787c5660d21b291d Parents: 8f7dbfd Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 17:45:43 2017 -0800 -- dev-support/Jenkinsfile| 73 ++--- dev-support/hbase-personality.sh | 8 dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 80 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/64b0a658/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index f06923b..b01a96b 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,7 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -102,7 +102,7 @@ fi dir ("${env.TOOLS}") { sh """#!/usr/bin/env bash echo "Downloading Project personality." -curl -L -o personality.sh "${env.PROJET_PERSONALITY}" +curl -L -o personality.sh "${env.PROJECT_PERSONALITY}" """ } stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh" @@ -203,15 +203,15 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { -TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' -OUTPUT_DIR_RELATIVE = "output-jdk8" +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // This isn't strictly needed on branches that only support jdk8, but doesn't hurt // and is needed on branches that do both jdk7 and jdk8 @@ -253,7 +253,68 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // Has to be relative to WORKSPACE. reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles : 'console-report.html', -reportName : 'JDK8 Nightly Build Report' +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is part of this last yetus stage to prevent findbugs precluding hadoop3 +// tests. 
+TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop3" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' +// Activates hadoop 3.0 profile in maven runs. +HADOOP_PROFILE = '3.0' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + // Not sure how two junit test reports will work. Disabling this for now. + // junit
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs.
Repository: hbase Updated Branches: refs/heads/branch-1.3 ef4b4edfb -> 1779417bc HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs. Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests. So Hadoop3 tests will run only if hadoop2 tests pass. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1779417b Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1779417b Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1779417b Branch: refs/heads/branch-1.3 Commit: 1779417bc20205b0e70757384fd2280b6cb05f66 Parents: ef4b4ed Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 17:46:09 2017 -0800 -- dev-support/Jenkinsfile| 73 ++--- dev-support/hbase-personality.sh | 8 dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 80 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1779417b/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index f06923b..b01a96b 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,7 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -102,7 +102,7 @@ fi dir ("${env.TOOLS}") { sh """#!/usr/bin/env bash echo "Downloading Project personality." -curl -L -o personality.sh "${env.PROJET_PERSONALITY}" +curl -L -o personality.sh "${env.PROJECT_PERSONALITY}" """ } stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh" @@ -203,15 +203,15 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { -TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' -OUTPUT_DIR_RELATIVE = "output-jdk8" +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // This isn't strictly needed on branches that only support jdk8, but doesn't hurt // and is needed on branches that do both jdk7 and jdk8 @@ -253,7 +253,68 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // Has to be relative to WORKSPACE. reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles : 'console-report.html', -reportName : 'JDK8 Nightly Build Report' +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is part of this last yetus stage to prevent findbugs precluding hadoop3 +// tests. 
+TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop3" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' +// Activates hadoop 3.0 profile in maven runs. +HADOOP_PROFILE = '3.0' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + // Not sure how two junit test reports will work. Disabling this for now. + // junit
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs.
Repository: hbase Updated Branches: refs/heads/branch-1 304b20524 -> f547f6e7d HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs. Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests. So Hadoop3 tests will run only if hadoop2 tests pass. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f547f6e7 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f547f6e7 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f547f6e7 Branch: refs/heads/branch-1 Commit: f547f6e7de4147f940cc2608a719e5a3531e2754 Parents: 304b205 Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 17:45:18 2017 -0800 -- dev-support/Jenkinsfile| 73 ++--- dev-support/hbase-personality.sh | 8 dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 80 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/f547f6e7/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 98519df..c694d1b 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,7 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -102,7 +102,7 @@ fi dir ("${env.TOOLS}") { sh """#!/usr/bin/env bash echo "Downloading Project personality." -curl -L -o personality.sh "${env.PROJET_PERSONALITY}" +curl -L -o personality.sh "${env.PROJECT_PERSONALITY}" """ } stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh" @@ -203,15 +203,15 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { -TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' -OUTPUT_DIR_RELATIVE = "output-jdk8" +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // This isn't strictly needed on branches that only support jdk8, but doesn't hurt // and is needed on branches that do both jdk7 and jdk8 @@ -253,7 +253,68 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // Has to be relative to WORKSPACE. reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles : 'console-report.html', -reportName : 'JDK8 Nightly Build Report' +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is part of this last yetus stage to prevent findbugs precluding hadoop3 +// tests. 
+TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop3" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' +// Activates hadoop 3.0 profile in maven runs. +HADOOP_PROFILE = '3.0' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + // Not sure how two junit test reports will work. Disabling this for now. + // junit test
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs.
Repository: hbase Updated Branches: refs/heads/branch-2 a150062bf -> b263eb564 HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs. Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests. So Hadoop3 tests will run only if hadoop2 tests pass. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b263eb56 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b263eb56 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b263eb56 Branch: refs/heads/branch-2 Commit: b263eb56481f54aa18e16c5ad1871d7259708284 Parents: a150062 Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 17:44:33 2017 -0800 -- dev-support/Jenkinsfile| 73 ++--- dev-support/hbase-personality.sh | 8 dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 80 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/b263eb56/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 744e9b0..dcef649 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,7 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -102,7 +102,7 @@ fi dir ("${env.TOOLS}") { sh """#!/usr/bin/env bash echo "Downloading Project personality." -curl -L -o personality.sh "${env.PROJET_PERSONALITY}" +curl -L -o personality.sh "${env.PROJECT_PERSONALITY}" """ } stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh" @@ -203,15 +203,15 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { -TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' -OUTPUT_DIR_RELATIVE = "output-jdk8" +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // This isn't strictly needed on branches that only support jdk8, but doesn't hurt // and is needed on branches that do both jdk7 and jdk8 @@ -253,7 +253,68 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // Has to be relative to WORKSPACE. reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles : 'console-report.html', -reportName : 'JDK8 Nightly Build Report' +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is part of this last yetus stage to prevent findbugs precluding hadoop3 +// tests. 
+TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop3" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' +// Activates hadoop 3.0 profile in maven runs. +HADOOP_PROFILE = '3.0' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + // Not sure how two junit test reports will work. Disabling this for now. + // junit test
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs.
Repository: hbase Updated Branches: refs/heads/master bcaf2fd20 -> a4272a9f0 HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs. Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests. So Hadoop3 tests will run only if hadoop2 tests pass. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a4272a9f Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a4272a9f Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a4272a9f Branch: refs/heads/master Commit: a4272a9f08f1005d1d3b2511dfbe8eca577f0d49 Parents: bcaf2fd Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 17:44:04 2017 -0800 -- dev-support/Jenkinsfile| 73 ++--- dev-support/hbase-personality.sh | 8 dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 80 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/a4272a9f/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 744e9b0..dcef649 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,7 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -102,7 +102,7 @@ fi dir ("${env.TOOLS}") { sh """#!/usr/bin/env bash echo "Downloading Project personality." -curl -L -o personality.sh "${env.PROJET_PERSONALITY}" +curl -L -o personality.sh "${env.PROJECT_PERSONALITY}" """ } stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh" @@ -203,15 +203,15 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { -TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' -OUTPUT_DIR_RELATIVE = "output-jdk8" +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // This isn't strictly needed on branches that only support jdk8, but doesn't hurt // and is needed on branches that do both jdk7 and jdk8 @@ -253,7 +253,68 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // Has to be relative to WORKSPACE. reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles : 'console-report.html', -reportName : 'JDK8 Nightly Build Report' +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is part of this last yetus stage to prevent findbugs precluding hadoop3 +// tests. 
+TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop3" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' +// Activates hadoop 3.0 profile in maven runs. +HADOOP_PROFILE = '3.0' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + // Not sure how two junit test reports will work. Disabling this for now. + // junit testResu
hbase git commit: HBASE-19148 Reevaluate default values of configurations; minor ADDENDUM
Repository: hbase Updated Branches: refs/heads/master 4d6b92868 -> bcaf2fd20 HBASE-19148 Reevaluate default values of configurations; minor ADDENDUM Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bcaf2fd2 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bcaf2fd2 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bcaf2fd2 Branch: refs/heads/master Commit: bcaf2fd20c542137c1d11dd32f2702af93d5ce70 Parents: 4d6b928 Author: Michael Stack Authored: Thu Dec 21 14:13:23 2017 -0800 Committer: Michael Stack Committed: Thu Dec 21 14:14:07 2017 -0800 -- .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/bcaf2fd2/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index d0f435b..e31db82 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -1399,7 +1399,7 @@ public class HMaster extends HRegionServer implements MasterServices { } } - boolean isByTable = getConfiguration().getBoolean("hbase.master.loadbalance.bytable", true); + boolean isByTable = getConfiguration().getBoolean("hbase.master.loadbalance.bytable", false); Map>> assignmentsByTable = this.assignmentManager.getRegionStates().getAssignmentsByTable(!isByTable);
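A minimal Java sketch of what this addendum changes (the property name and the getBoolean call come from the diff above; the wrapping class is illustrative, not the actual HMaster code): with the default flipped to false, the balancer is handed one combined grouping of assignments unless the property is explicitly enabled.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class ByTableBalanceFlag {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Same read as HMaster#balance after this addendum: the default is now false.
        boolean isByTable = conf.getBoolean("hbase.master.loadbalance.bytable", false);
        // In the master, this flag decides whether assignments are grouped per table
        // or lumped together before being handed to the LoadBalancer.
        System.out.println("Balance by table? " + isByTable); // "false" unless overridden
      }
    }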
hbase git commit: HBASE-19148 Reevaluate default values of configurations; minor ADDENDUM
Repository: hbase Updated Branches: refs/heads/branch-2 18987ed86 -> a150062bf HBASE-19148 Reevaluate default values of configurations; minor ADDENDUM Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a150062b Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a150062b Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a150062b Branch: refs/heads/branch-2 Commit: a150062bfb6c41bc5c5fda20f797d64ff220657b Parents: 18987ed Author: Michael Stack Authored: Thu Dec 21 14:13:23 2017 -0800 Committer: Michael Stack Committed: Thu Dec 21 14:13:23 2017 -0800 -- .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/a150062b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index d0f435b..e31db82 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -1399,7 +1399,7 @@ public class HMaster extends HRegionServer implements MasterServices { } } - boolean isByTable = getConfiguration().getBoolean("hbase.master.loadbalance.bytable", true); + boolean isByTable = getConfiguration().getBoolean("hbase.master.loadbalance.bytable", false); Map>> assignmentsByTable = this.assignmentManager.getRegionStates().getAssignmentsByTable(!isByTable);
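For completeness, a small sketch of the operator-side override (only the property name is taken from the diff; the rest is illustrative): a site that still wants per-table balancing after this addendum sets the key explicitly, normally in hbase-site.xml rather than in code.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class EnableByTableBalancing {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Programmatic stand-in for adding hbase.master.loadbalance.bytable=true
        // to hbase-site.xml; shown here only for illustration.
        conf.setBoolean("hbase.master.loadbalance.bytable", true);
        System.out.println(conf.getBoolean("hbase.master.loadbalance.bytable", false)); // true
      }
    }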
hbase git commit: HBASE-19148 Reevaluate default values of configurations
Repository: hbase Updated Branches: refs/heads/branch-2 4aec7a491 -> 18987ed86 HBASE-19148 Reevaluate default values of configurations Removed unused: hbase.fs.tmp.dir Added hbase.master.loadbalance.bytable Edit of description text. Moved stuff around to put configs beside each other. M hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java Emit some hbase configs in log on startup. Signed-off-by: Michael Stack Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/18987ed8 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/18987ed8 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/18987ed8 Branch: refs/heads/branch-2 Commit: 18987ed86777201a8eff80c018848c86bcd052de Parents: 4aec7a4 Author: Michael Stack Authored: Fri Dec 15 17:56:38 2017 -0800 Committer: Michael Stack Committed: Thu Dec 21 14:07:47 2017 -0800 -- .../src/main/resources/hbase-default.xml| 177 +++ .../org/apache/hadoop/hbase/master/HMaster.java | 2 +- .../assignment/SplitTableRegionProcedure.java | 7 +- .../hbase/regionserver/wal/AbstractFSWAL.java | 14 +- .../hadoop/hbase/util/ServerCommandLine.java| 28 ++- 5 files changed, 140 insertions(+), 88 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/18987ed8/hbase-common/src/main/resources/hbase-default.xml -- diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml index dfc6eeb..a79ead4 100644 --- a/hbase-common/src/main/resources/hbase-default.xml +++ b/hbase-common/src/main/resources/hbase-default.xml @@ -63,13 +63,6 @@ possible configurations would overwhelm and obscure the important. machine restart. -hbase.fs.tmp.dir -/user/${user.name}/hbase-staging -A staging directory in default file system (HDFS) -for keeping temporary data. - - - hbase.cluster.distributed false The mode the cluster will be in. Possible values are @@ -146,8 +139,8 @@ possible configurations would overwhelm and obscure the important. hbase.master.procedurewalcleaner.ttl 60480 -How long a Procedure WAL stays will remain in the -{hbase.rootdir}/oldWALs/masterProcedureWALs directory, after which it will be cleaned +How long a Procedure WAL will remain in the +{hbase.rootdir}/MasterProcedureWALs directory, after which it will be cleaned by a Master thread. The value is in milliseconds. @@ -166,7 +159,19 @@ possible configurations would overwhelm and obscure the important. true Whether or not the Master listens to the Master web UI port (hbase.master.info.port) and redirects requests to the web - UI server shared by the Master and RegionServer. + UI server shared by the Master and RegionServer. Config. makes + sense when Master is serving Regions (not the default). + + +hbase.master.fileSplitTimeout +60 +Splitting a region, how long to wait on the file-splitting + step before aborting the attempt. Default: 60. This setting used + to be known as hbase.regionserver.fileSplitTimeout in hbase-1.x. + Split is now run master-side hence the rename (If a + 'hbase.master.fileSplitTimeout' setting found, will use it to + prime the current 'hbase.master.fileSplitTimeout' + Configuration. @@ -198,7 +203,10 @@ possible configurations would overwhelm and obscure the important. hbase.regionserver.handler.count 30 Count of RPC Listener instances spun up on RegionServers. -Same property is used by the Master for count of master handlers. + Same property is used by the Master for count of master handlers. + Too many handlers can be counter-productive. 
Make it a multiple of + CPU count. If mostly read-only, handlers count close to cpu count + does well. Start with twice the CPU count and tune from there. hbase.ipc.server.callqueue.handler.factor @@ -292,31 +300,37 @@ possible configurations would overwhelm and obscure the important. Updates are blocked and flushes are forced until size of all memstores in a region server hits hbase.regionserver.global.memstore.size.lower.limit. The default value in this configuration has been intentionally left empty in order to - honor the old hbase.regionserver.global.memstore.upperLimit property if present. + honor the old hbase.regionserver.global.memstore.upperLimit property if present. + hbase.regionserver.global.memstore.size.lower.limit -Maximum size of all memstores in a region server before flushes are forced. - Defaults to 95% of hbase.regions
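The hbase.master.fileSplitTimeout description above notes that the old hbase-1.x key can prime the new one. A standalone sketch of that fallback read, assuming a 600000 ms default (this is not the exact SplitTableRegionProcedure code):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class FileSplitTimeoutCompat {
      // Assumed default; see the hbase.master.fileSplitTimeout entry in hbase-default.xml.
      static final long DEFAULT_TIMEOUT_MS = 600000L;

      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // The new key wins if set; otherwise fall back to the old hbase-1.x name so
        // existing site configurations keep working after the rename.
        long timeout = conf.getLong("hbase.master.fileSplitTimeout",
            conf.getLong("hbase.regionserver.fileSplitTimeout", DEFAULT_TIMEOUT_MS));
        System.out.println("file split timeout (ms): " + timeout);
      }
    }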
hbase git commit: HBASE-19148 Reevaluate default values of configurations
Repository: hbase Updated Branches: refs/heads/master 59baf12c0 -> 4d6b92868 HBASE-19148 Reevaluate default values of configurations Removed unused: hbase.fs.tmp.dir Added hbase.master.loadbalance.bytable Edit of description text. Moved stuff around to put configs beside each other. M hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java Emit some hbase configs in log on startup. Signed-off-by: Michael Stack Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4d6b9286 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4d6b9286 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4d6b9286 Branch: refs/heads/master Commit: 4d6b928682cc2a17f3dfd0179fb3fd46fd9e0a1f Parents: 59baf12 Author: Michael Stack Authored: Fri Dec 15 17:56:38 2017 -0800 Committer: Michael Stack Committed: Thu Dec 21 14:07:16 2017 -0800 -- .../src/main/resources/hbase-default.xml| 177 +++ .../org/apache/hadoop/hbase/master/HMaster.java | 2 +- .../assignment/SplitTableRegionProcedure.java | 7 +- .../hbase/regionserver/wal/AbstractFSWAL.java | 14 +- .../hadoop/hbase/util/ServerCommandLine.java| 28 ++- 5 files changed, 140 insertions(+), 88 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/4d6b9286/hbase-common/src/main/resources/hbase-default.xml -- diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml index 7995c41..b42ce3f 100644 --- a/hbase-common/src/main/resources/hbase-default.xml +++ b/hbase-common/src/main/resources/hbase-default.xml @@ -63,13 +63,6 @@ possible configurations would overwhelm and obscure the important. machine restart. -hbase.fs.tmp.dir -/user/${user.name}/hbase-staging -A staging directory in default file system (HDFS) -for keeping temporary data. - - - hbase.cluster.distributed false The mode the cluster will be in. Possible values are @@ -146,8 +139,8 @@ possible configurations would overwhelm and obscure the important. hbase.master.procedurewalcleaner.ttl 60480 -How long a Procedure WAL stays will remain in the -{hbase.rootdir}/oldWALs/masterProcedureWALs directory, after which it will be cleaned +How long a Procedure WAL will remain in the +{hbase.rootdir}/MasterProcedureWALs directory, after which it will be cleaned by a Master thread. The value is in milliseconds. @@ -166,7 +159,19 @@ possible configurations would overwhelm and obscure the important. true Whether or not the Master listens to the Master web UI port (hbase.master.info.port) and redirects requests to the web - UI server shared by the Master and RegionServer. + UI server shared by the Master and RegionServer. Config. makes + sense when Master is serving Regions (not the default). + + +hbase.master.fileSplitTimeout +60 +Splitting a region, how long to wait on the file-splitting + step before aborting the attempt. Default: 60. This setting used + to be known as hbase.regionserver.fileSplitTimeout in hbase-1.x. + Split is now run master-side hence the rename (If a + 'hbase.master.fileSplitTimeout' setting found, will use it to + prime the current 'hbase.master.fileSplitTimeout' + Configuration. @@ -198,7 +203,10 @@ possible configurations would overwhelm and obscure the important. hbase.regionserver.handler.count 30 Count of RPC Listener instances spun up on RegionServers. -Same property is used by the Master for count of master handlers. + Same property is used by the Master for count of master handlers. + Too many handlers can be counter-productive. 
Make it a multiple of + CPU count. If mostly read-only, handlers count close to cpu count + does well. Start with twice the CPU count and tune from there. hbase.ipc.server.callqueue.handler.factor @@ -292,31 +300,37 @@ possible configurations would overwhelm and obscure the important. Updates are blocked and flushes are forced until size of all memstores in a region server hits hbase.regionserver.global.memstore.size.lower.limit. The default value in this configuration has been intentionally left empty in order to - honor the old hbase.regionserver.global.memstore.upperLimit property if present. + honor the old hbase.regionserver.global.memstore.upperLimit property if present. + hbase.regionserver.global.memstore.size.lower.limit -Maximum size of all memstores in a region server before flushes are forced. - Defaults to 95% of hbase.regionserve
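The ServerCommandLine portion of this change is summarized above as "Emit some hbase configs in log on startup." A hypothetical sketch of that idea (the class name and the chosen keys are made up; the real change lives in ServerCommandLine.java):

    import java.util.Arrays;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class LogSelectedConfigs {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Print the effective value of a few interesting settings so they are visible
        // at startup; the keys listed here are illustrative only.
        for (String key : Arrays.asList(
            "hbase.regionserver.handler.count",
            "hbase.master.loadbalance.bytable",
            "hbase.master.fileSplitTimeout")) {
          System.out.println(key + "=" + conf.get(key)); // null if unset and no default in conf
        }
      }
    }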
hbase git commit: .
Repository: hbase Updated Branches: refs/heads/HBASE-19491 7e656e352 -> 20b4aa971 . Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/20b4aa97 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/20b4aa97 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/20b4aa97 Branch: refs/heads/HBASE-19491 Commit: 20b4aa9716d747bc8b0f581dc30ea5d5f139f663 Parents: 7e656e3 Author: Apekshit Sharma Authored: Thu Dec 21 11:40:24 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 11:40:24 2017 -0800 -- dev-support/Jenkinsfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/20b4aa97/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index e197cb0..07be9c0 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -212,9 +212,10 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } stage ('yetus jdk8 hadoop2 checks') { when { -not { +// todo: revert. disabling for testing. +//not { branch 'branch-1.1*' -} +//} } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout'
hbase git commit: HBASE-19575 add copy constructor to Mutation
Repository: hbase Updated Branches: refs/heads/master 6cefabee2 -> 59baf12c0 HBASE-19575 add copy constructor to Mutation Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59baf12c Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59baf12c Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59baf12c Branch: refs/heads/master Commit: 59baf12c03303dab2a69352d5f35d38bd464d943 Parents: 6cefabe Author: Chia-Ping Tsai Authored: Fri Dec 22 03:13:03 2017 +0800 Committer: Chia-Ping Tsai Committed: Fri Dec 22 03:24:43 2017 +0800 -- .../org/apache/hadoop/hbase/client/Append.java | 32 ++- .../org/apache/hadoop/hbase/client/Delete.java | 32 ++- .../apache/hadoop/hbase/client/Increment.java | 32 ++- .../apache/hadoop/hbase/client/Mutation.java| 45 +++- .../hbase/client/OperationWithAttributes.java | 21 +- .../org/apache/hadoop/hbase/client/Put.java | 30 ++- .../hadoop/hbase/client/TestMutation.java | 239 +++ .../org/apache/hadoop/hbase/client/TestPut.java | 125 -- .../hbase/regionserver/TestMetricsRegion.java | 2 +- 9 files changed, 387 insertions(+), 171 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/59baf12c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java index 24e9512..0cb51a2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java @@ -112,17 +112,11 @@ public class Append extends Mutation { } /** * Copy constructor - * @param a + * @param appendToCopy append to copy */ - public Append(Append a) { -this.row = a.getRow(); -this.ts = a.getTimeStamp(); -this.tr = a.getTimeRange(); -this.familyMap.putAll(a.getFamilyCellMap()); -for (Map.Entry entry : a.getAttributesMap().entrySet()) { - this.setAttribute(entry.getKey(), entry.getValue()); -} -this.setPriority(a.getPriority()); + public Append(Append appendToCopy) { +super(appendToCopy); +this.tr = appendToCopy.getTimeRange(); } /** Create a Append operation for the specified row. @@ -138,6 +132,18 @@ public class Append extends Mutation { } /** + * Construct the Append with user defined data. NOTED: + * 1) all cells in the familyMap must have the DataType.Put + * 2) the row of each cell must be same with passed row. + * @param row row. CAN'T be null + * @param ts timestamp + * @param familyMap the map to collect all cells internally. CAN'T be null + */ + public Append(byte[] row, long ts, NavigableMap> familyMap) { +super(row, ts, familyMap); + } + + /** * Add the specified column and value to this Append operation. * @param family family name * @param qualifier column qualifier @@ -202,6 +208,12 @@ public class Append extends Mutation { return (Append) super.setDurability(d); } + /** + * Method for setting the Append's familyMap + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. 
+ * Use {@link Append#Append(byte[], long, NavigableMap)} instead + */ + @Deprecated @Override public Append setFamilyCellMap(NavigableMap> map) { return (Append) super.setFamilyCellMap(map); http://git-wip-us.apache.org/repos/asf/hbase/blob/59baf12c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java index 2d296fb..57f5648 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java @@ -135,17 +135,23 @@ public class Delete extends Mutation implements Comparable { } /** - * @param d Delete to clone. + * @param deleteToCopy delete to copy */ - public Delete(final Delete d) { -this.row = d.getRow(); -this.ts = d.getTimeStamp(); -this.familyMap.putAll(d.getFamilyCellMap()); -this.durability = d.durability; -for (Map.Entry entry : d.getAttributesMap().entrySet()) { - this.setAttribute(entry.getKey(), entry.getValue()); -} -super.setPriority(d.getPriority()); + public Delete(final Delete deleteToCopy) { +super(deleteToCopy); + } + + /** + * Construct the Delete with user defined data. NOTED: + * 1) all cells in the familyMap must have the delete type. + * see {@link org.apac
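A short usage sketch of the reworked copy constructor (row and attribute values are made up): because Append(Append) now delegates to Mutation's copy constructor, attributes, priority and durability travel with the copy instead of being re-set field by field.

    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CopyAppendExample {
      public static void main(String[] args) {
        Append original = new Append(Bytes.toBytes("row-1"));
        original.setAttribute("source", Bytes.toBytes("example"));

        // Copy constructor from this change: cells, attributes and priority are
        // carried over via super(appendToCopy).
        Append copy = new Append(original);
        System.out.println(Bytes.toString(copy.getRow()));                  // "row-1"
        System.out.println(Bytes.toString(copy.getAttribute("source")));    // "example"
      }
    }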
hbase git commit: HBASE-19575 add copy constructor to Mutation
Repository: hbase Updated Branches: refs/heads/branch-2 a28156be9 -> 4aec7a491 HBASE-19575 add copy constructor to Mutation Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4aec7a49 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4aec7a49 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4aec7a49 Branch: refs/heads/branch-2 Commit: 4aec7a491a15edac33bcb192f8c347b299b3990c Parents: a28156b Author: Chia-Ping Tsai Authored: Fri Dec 22 03:13:03 2017 +0800 Committer: Chia-Ping Tsai Committed: Fri Dec 22 03:19:47 2017 +0800 -- .../org/apache/hadoop/hbase/client/Append.java | 32 ++- .../org/apache/hadoop/hbase/client/Delete.java | 32 ++- .../apache/hadoop/hbase/client/Increment.java | 32 ++- .../apache/hadoop/hbase/client/Mutation.java| 45 +++- .../hbase/client/OperationWithAttributes.java | 21 +- .../org/apache/hadoop/hbase/client/Put.java | 30 ++- .../hadoop/hbase/client/TestMutation.java | 239 +++ .../org/apache/hadoop/hbase/client/TestPut.java | 125 -- .../hbase/regionserver/TestMetricsRegion.java | 2 +- 9 files changed, 387 insertions(+), 171 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/4aec7a49/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java index 24e9512..0cb51a2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java @@ -112,17 +112,11 @@ public class Append extends Mutation { } /** * Copy constructor - * @param a + * @param appendToCopy append to copy */ - public Append(Append a) { -this.row = a.getRow(); -this.ts = a.getTimeStamp(); -this.tr = a.getTimeRange(); -this.familyMap.putAll(a.getFamilyCellMap()); -for (Map.Entry entry : a.getAttributesMap().entrySet()) { - this.setAttribute(entry.getKey(), entry.getValue()); -} -this.setPriority(a.getPriority()); + public Append(Append appendToCopy) { +super(appendToCopy); +this.tr = appendToCopy.getTimeRange(); } /** Create a Append operation for the specified row. @@ -138,6 +132,18 @@ public class Append extends Mutation { } /** + * Construct the Append with user defined data. NOTED: + * 1) all cells in the familyMap must have the DataType.Put + * 2) the row of each cell must be same with passed row. + * @param row row. CAN'T be null + * @param ts timestamp + * @param familyMap the map to collect all cells internally. CAN'T be null + */ + public Append(byte[] row, long ts, NavigableMap> familyMap) { +super(row, ts, familyMap); + } + + /** * Add the specified column and value to this Append operation. * @param family family name * @param qualifier column qualifier @@ -202,6 +208,12 @@ public class Append extends Mutation { return (Append) super.setDurability(d); } + /** + * Method for setting the Append's familyMap + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. 
+ * Use {@link Append#Append(byte[], long, NavigableMap)} instead + */ + @Deprecated @Override public Append setFamilyCellMap(NavigableMap> map) { return (Append) super.setFamilyCellMap(map); http://git-wip-us.apache.org/repos/asf/hbase/blob/4aec7a49/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java index 2d296fb..57f5648 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java @@ -135,17 +135,23 @@ public class Delete extends Mutation implements Comparable { } /** - * @param d Delete to clone. + * @param deleteToCopy delete to copy */ - public Delete(final Delete d) { -this.row = d.getRow(); -this.ts = d.getTimeStamp(); -this.familyMap.putAll(d.getFamilyCellMap()); -this.durability = d.durability; -for (Map.Entry entry : d.getAttributesMap().entrySet()) { - this.setAttribute(entry.getKey(), entry.getValue()); -} -super.setPriority(d.getPriority()); + public Delete(final Delete deleteToCopy) { +super(deleteToCopy); + } + + /** + * Construct the Delete with user defined data. NOTED: + * 1) all cells in the familyMap must have the delete type. + * see {@link org.
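Since setFamilyCellMap is deprecated by this change, the family map is meant to be supplied at construction time instead. A sketch of that pattern, assuming Put received the same (row, ts, familyMap) constructor shown for Append and Delete above; row, family and value names are illustrative:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.NavigableMap;
    import java.util.TreeMap;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PutFromFamilyMapExample {
      public static void main(String[] args) {
        byte[] row = Bytes.toBytes("row-1");
        long ts = 1L;

        NavigableMap<byte[], List<Cell>> familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        List<Cell> cells = new ArrayList<>();
        // Per the javadoc above: every cell must carry the passed row and the Put type.
        cells.add(new KeyValue(row, Bytes.toBytes("cf"), Bytes.toBytes("q"), ts,
            KeyValue.Type.Put, Bytes.toBytes("value")));
        familyMap.put(Bytes.toBytes("cf"), cells);

        Put put = new Put(row, ts, familyMap); // preferred over new Put(row).setFamilyCellMap(map)
        System.out.println(put.size());        // prints 1
      }
    }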
hbase git commit: HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs
Repository: hbase Updated Branches: refs/heads/HBASE-19491 [created] 7e656e352 HBASE-19570 Add hadoop3 tests to Nightly master/branch-2 runs Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7e656e35 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7e656e35 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7e656e35 Branch: refs/heads/HBASE-19491 Commit: 7e656e352e47b8b7a3c5dbfe0dbc963c7d54e1a5 Parents: 6cefabe Author: Apekshit Sharma Authored: Thu Dec 21 11:20:40 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 11:20:40 2017 -0800 -- dev-support/Jenkinsfile| 75 +++-- dev-support/hbase-personality.sh | 7 +++ dev-support/hbase_nightly_yetus.sh | 5 +++ 3 files changed, 83 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/7e656e35/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 744e9b0..e197cb0 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -35,7 +35,8 @@ pipeline { BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' PROJECT = 'hbase' -PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' +// todo: revert +PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/HBASE-19491/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh' WHITESPACE_IGNORE_LIST = '.*/generated/.*' @@ -109,6 +110,12 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } stage ('yetus general check') { + // todo: revert + when { +not { + branch 'HBASE-19491' +} + } environment { // TODO does hadoopcheck need to be jdk specific? // Should be things that work with multijdk @@ -203,19 +210,79 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } } } -stage ('yetus jdk8 checks') { +stage ('yetus jdk8 hadoop2 checks') { when { not { branch 'branch-1.1*' } } environment { +TESTS = 'mvninstall,compile,javac,unit,htmlout' +OUTPUT_DIR_RELATIVE = "output-jdk8-hadoop2" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" +// This isn't strictly needed on branches that only support jdk8, but doesn't hurt +// and is needed on branches that do both jdk7 and jdk8 +SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' + } + steps { +unstash 'yetus' +sh '''#!/usr/bin/env bash + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" +''' +sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" + } + post { +always { + junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true + // zip surefire reports. + sh '''#!/bin/bash -e +if [ -d "${OUTPUT_DIR}/archiver" ]; then + count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l) + if [[ 0 -ne ${count} ]]; then +echo "zipping ${count} archived files" +zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver" + else +echo "No archived files, skipping compressing." + fi +else + echo "No archiver directory, skipping compressing." +fi +''' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" + publishHTML target: [ +allowMissing : true, +keepAll : true, +alwaysLinkToLastBuild: true, +// Has to be relative to WORKSPACE. +reportDir: "${env.OUTPUT_DIR_RELATIVE}", +reportFiles : 'console-report.html', +reportName : 'JDK8 Nightly Build Report (Hadoop2)' + ] +} + } +} +stage ('yetus jdk8 hadoop3 checks') { + when { +not { + branch 'branch-1*' +} + } + environment { +// Failure in any stage fails the build and consecutive stages are not built. +// Findbugs is pa
hbase git commit: HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE.
Repository: hbase Updated Branches: refs/heads/branch-1.1 bcacdeb21 -> 28cc1fbd6 HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/28cc1fbd Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/28cc1fbd Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/28cc1fbd Branch: refs/heads/branch-1.1 Commit: 28cc1fbd6c9376ad9dd4695595e84462347971c9 Parents: bcacdeb Author: Apekshit Sharma Authored: Wed Dec 20 13:27:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 10:56:39 2017 -0800 -- dev-support/Jenkinsfile| 99 +++-- dev-support/hbase_nightly_yetus.sh | 17 +++--- 2 files changed, 55 insertions(+), 61 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/28cc1fbd/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 94685e4..3070a4d 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -34,13 +34,6 @@ pipeline { // where we check out to across stages BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' -// where we'll write everything from different steps. -OUTPUT_RELATIVE_GENERAL = 'output-general' -OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general" -OUTPUT_RELATIVE_JDK7 = 'output-jdk7' -OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7" -OUTPUT_RELATIVE_JDK8 = 'output-jdk8' -OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8" PROJECT = 'hbase' PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. @@ -123,30 +116,30 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not // doing multijdk there. MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}" -OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}" +OUTPUT_DIR_RELATIVE = "output-general" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" ''' // TODO should this be a download from master, similar to how the personality is? sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" } post { always { - // env variables don't work in archive? or this has to be relative to WORKSPACE. :( - archive 'output-general/*' - archive 'output-general/**/*' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" publishHTML target: [ allowMissing: true, keepAll: true, alwaysLinkToLastBuild: true, -// has to be relative to WORKSPACE :( -reportDir: 'output-general', +// Has to be relative to WORKSPACE +reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles: 'console-report.html', reportName: 'General Nightly Build Report' ] @@ -159,16 +152,16 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_JDK7}" -OUTPUTDIR = "${env.OUTPUTDIR_JDK7}" +OUTPUT_DIR_RELATIVE = "output-jdk7" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already. } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" +
hbase git commit: HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE.
Repository: hbase Updated Branches: refs/heads/branch-1.2 605d92e3c -> 5f7d8e0e1 HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5f7d8e0e Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5f7d8e0e Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5f7d8e0e Branch: refs/heads/branch-1.2 Commit: 5f7d8e0e1bb4c4af118a2e96ed7aa7b957a388d2 Parents: 605d92e Author: Apekshit Sharma Authored: Wed Dec 20 13:27:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 10:56:27 2017 -0800 -- dev-support/Jenkinsfile| 99 +++-- dev-support/hbase_nightly_yetus.sh | 17 +++--- 2 files changed, 55 insertions(+), 61 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/5f7d8e0e/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 25ccc2f..f06923b 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -34,13 +34,6 @@ pipeline { // where we check out to across stages BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' -// where we'll write everything from different steps. -OUTPUT_RELATIVE_GENERAL = 'output-general' -OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general" -OUTPUT_RELATIVE_JDK7 = 'output-jdk7' -OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7" -OUTPUT_RELATIVE_JDK8 = 'output-jdk8' -OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8" PROJECT = 'hbase' PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. @@ -123,30 +116,30 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not // doing multijdk there. MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}" -OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}" +OUTPUT_DIR_RELATIVE = "output-general" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" ''' // TODO should this be a download from master, similar to how the personality is? sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" } post { always { - // env variables don't work in archive? or this has to be relative to WORKSPACE. :( - archive 'output-general/*' - archive 'output-general/**/*' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" publishHTML target: [ allowMissing: true, keepAll: true, alwaysLinkToLastBuild: true, -// has to be relative to WORKSPACE :( -reportDir: 'output-general', +// Has to be relative to WORKSPACE +reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles: 'console-report.html', reportName: 'General Nightly Build Report' ] @@ -159,16 +152,16 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_JDK7}" -OUTPUTDIR = "${env.OUTPUTDIR_JDK7}" +OUTPUT_DIR_RELATIVE = "output-jdk7" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already. } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" +
hbase git commit: HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE.
Repository: hbase Updated Branches: refs/heads/branch-1.3 b0b4eb217 -> ef4b4edfb HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ef4b4edf Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ef4b4edf Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ef4b4edf Branch: refs/heads/branch-1.3 Commit: ef4b4edfb38a825c01ea720f18df7faa3dbc50c6 Parents: b0b4eb2 Author: Apekshit Sharma Authored: Wed Dec 20 13:27:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 10:56:16 2017 -0800 -- dev-support/Jenkinsfile| 99 +++-- dev-support/hbase_nightly_yetus.sh | 17 +++--- 2 files changed, 55 insertions(+), 61 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/ef4b4edf/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 25ccc2f..f06923b 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -34,13 +34,6 @@ pipeline { // where we check out to across stages BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' -// where we'll write everything from different steps. -OUTPUT_RELATIVE_GENERAL = 'output-general' -OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general" -OUTPUT_RELATIVE_JDK7 = 'output-jdk7' -OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7" -OUTPUT_RELATIVE_JDK8 = 'output-jdk8' -OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8" PROJECT = 'hbase' PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. @@ -123,30 +116,30 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not // doing multijdk there. MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}" -OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}" +OUTPUT_DIR_RELATIVE = "output-general" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" ''' // TODO should this be a download from master, similar to how the personality is? sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" } post { always { - // env variables don't work in archive? or this has to be relative to WORKSPACE. :( - archive 'output-general/*' - archive 'output-general/**/*' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" publishHTML target: [ allowMissing: true, keepAll: true, alwaysLinkToLastBuild: true, -// has to be relative to WORKSPACE :( -reportDir: 'output-general', +// Has to be relative to WORKSPACE +reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles: 'console-report.html', reportName: 'General Nightly Build Report' ] @@ -159,16 +152,16 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_JDK7}" -OUTPUTDIR = "${env.OUTPUTDIR_JDK7}" +OUTPUT_DIR_RELATIVE = "output-jdk7" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already. } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" +
hbase git commit: HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE.
Repository: hbase Updated Branches: refs/heads/branch-1 0a31209f9 -> 304b20524 HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/304b2052 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/304b2052 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/304b2052 Branch: refs/heads/branch-1 Commit: 304b2052460c230c16721d3b564b7e217dfc43b8 Parents: 0a31209 Author: Apekshit Sharma Authored: Wed Dec 20 13:27:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 10:55:50 2017 -0800 -- dev-support/Jenkinsfile| 99 +++-- dev-support/hbase_nightly_yetus.sh | 17 +++--- 2 files changed, 55 insertions(+), 61 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/304b2052/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 1b2c974..98519df 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -34,13 +34,6 @@ pipeline { // where we check out to across stages BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' -// where we'll write everything from different steps. -OUTPUT_RELATIVE_GENERAL = 'output-general' -OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general" -OUTPUT_RELATIVE_JDK7 = 'output-jdk7' -OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7" -OUTPUT_RELATIVE_JDK8 = 'output-jdk8' -OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8" PROJECT = 'hbase' PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. @@ -123,30 +116,30 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not // doing multijdk there. MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}" -OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}" +OUTPUT_DIR_RELATIVE = "output-general" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" ''' // TODO should this be a download from master, similar to how the personality is? sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" } post { always { - // env variables don't work in archive? or this has to be relative to WORKSPACE. :( - archive 'output-general/*' - archive 'output-general/**/*' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" publishHTML target: [ allowMissing: true, keepAll: true, alwaysLinkToLastBuild: true, -// has to be relative to WORKSPACE :( -reportDir: 'output-general', +// Has to be relative to WORKSPACE +reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles: 'console-report.html', reportName: 'General Nightly Build Report' ] @@ -159,16 +152,16 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_JDK7}" -OUTPUTDIR = "${env.OUTPUTDIR_JDK7}" +OUTPUT_DIR_RELATIVE = "output-jdk7" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already. } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "$
hbase git commit: HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE.
Repository: hbase Updated Branches: refs/heads/branch-1.4 85211b1b5 -> 8f7dbfda1 HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8f7dbfda Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8f7dbfda Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8f7dbfda Branch: refs/heads/branch-1.4 Commit: 8f7dbfda1afdc62dd6ceb3cc249e3bf52857314b Parents: 85211b1 Author: Apekshit Sharma Authored: Wed Dec 20 13:27:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 10:56:04 2017 -0800 -- dev-support/Jenkinsfile| 99 +++-- dev-support/hbase_nightly_yetus.sh | 17 +++--- 2 files changed, 55 insertions(+), 61 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/8f7dbfda/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index 25ccc2f..f06923b 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -34,13 +34,6 @@ pipeline { // where we check out to across stages BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' -// where we'll write everything from different steps. -OUTPUT_RELATIVE_GENERAL = 'output-general' -OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general" -OUTPUT_RELATIVE_JDK7 = 'output-jdk7' -OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7" -OUTPUT_RELATIVE_JDK8 = 'output-jdk8' -OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8" PROJECT = 'hbase' PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. @@ -123,30 +116,30 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not // doing multijdk there. MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}" -OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}" +OUTPUT_DIR_RELATIVE = "output-general" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" ''' // TODO should this be a download from master, similar to how the personality is? sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" } post { always { - // env variables don't work in archive? or this has to be relative to WORKSPACE. :( - archive 'output-general/*' - archive 'output-general/**/*' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" publishHTML target: [ allowMissing: true, keepAll: true, alwaysLinkToLastBuild: true, -// has to be relative to WORKSPACE :( -reportDir: 'output-general', +// Has to be relative to WORKSPACE +reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles: 'console-report.html', reportName: 'General Nightly Build Report' ] @@ -159,16 +152,16 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_JDK7}" -OUTPUTDIR = "${env.OUTPUTDIR_JDK7}" +OUTPUT_DIR_RELATIVE = "output-jdk7" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already. } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" +
hbase git commit: HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE.
Repository: hbase Updated Branches: refs/heads/branch-2 bf998077b -> a28156be9 HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a28156be Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a28156be Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a28156be Branch: refs/heads/branch-2 Commit: a28156be945f9cb86514287107ebd451ae5309f6 Parents: bf99807 Author: Apekshit Sharma Authored: Wed Dec 20 13:27:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 10:55:12 2017 -0800 -- dev-support/Jenkinsfile| 99 +++-- dev-support/hbase_nightly_yetus.sh | 17 +++--- 2 files changed, 55 insertions(+), 61 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/a28156be/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index fda3299..744e9b0 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -34,13 +34,6 @@ pipeline { // where we check out to across stages BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' -// where we'll write everything from different steps. -OUTPUT_RELATIVE_GENERAL = 'output-general' -OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general" -OUTPUT_RELATIVE_JDK7 = 'output-jdk7' -OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7" -OUTPUT_RELATIVE_JDK8 = 'output-jdk8' -OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8" PROJECT = 'hbase' PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. @@ -123,30 +116,30 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not // doing multijdk there. MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}" -OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}" +OUTPUT_DIR_RELATIVE = "output-general" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" ''' // TODO should this be a download from master, similar to how the personality is? sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" } post { always { - // env variables don't work in archive? or this has to be relative to WORKSPACE. :( - archive 'output-general/*' - archive 'output-general/**/*' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" publishHTML target: [ allowMissing: true, keepAll: true, alwaysLinkToLastBuild: true, -// has to be relative to WORKSPACE :( -reportDir: 'output-general', +// Has to be relative to WORKSPACE +reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles: 'console-report.html', reportName: 'General Nightly Build Report' ] @@ -159,16 +152,16 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_JDK7}" -OUTPUTDIR = "${env.OUTPUTDIR_JDK7}" +OUTPUT_DIR_RELATIVE = "output-jdk7" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already. } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "$
hbase git commit: HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE.
Repository: hbase Updated Branches: refs/heads/master 35728acd2 -> 6cefabee2 HBASE-19571 Renames OUTPUTDIR to OUTPUT_DIR and OUTPUT_RELATIVE to OUTPUT_DIR_RELATIVE. Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6cefabee Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6cefabee Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6cefabee Branch: refs/heads/master Commit: 6cefabee268f8509d7a50c632ff0ff7effcaf685 Parents: 35728ac Author: Apekshit Sharma Authored: Wed Dec 20 13:27:25 2017 -0800 Committer: Apekshit Sharma Committed: Thu Dec 21 10:46:43 2017 -0800 -- dev-support/Jenkinsfile| 99 +++-- dev-support/hbase_nightly_yetus.sh | 17 +++--- 2 files changed, 55 insertions(+), 61 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/6cefabee/dev-support/Jenkinsfile -- diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index fda3299..744e9b0 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -34,13 +34,6 @@ pipeline { // where we check out to across stages BASEDIR = "${env.WORKSPACE}/component" YETUS_RELEASE = '0.6.0' -// where we'll write everything from different steps. -OUTPUT_RELATIVE_GENERAL = 'output-general' -OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general" -OUTPUT_RELATIVE_JDK7 = 'output-jdk7' -OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7" -OUTPUT_RELATIVE_JDK8 = 'output-jdk8' -OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8" PROJECT = 'hbase' PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh' // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag. @@ -123,30 +116,30 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not // doing multijdk there. MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_GENERAL}" -OUTPUTDIR = "${env.OUTPUTDIR_GENERAL}" +OUTPUT_DIR_RELATIVE = "output-general" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine" ''' // TODO should this be a download from master, similar to how the personality is? sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh" } post { always { - // env variables don't work in archive? or this has to be relative to WORKSPACE. :( - archive 'output-general/*' - archive 'output-general/**/*' + // Has to be relative to WORKSPACE. 
+ archive "${env.OUTPUT_DIR_RELATIVE}/*" + archive "${env.OUTPUT_DIR_RELATIVE}/**/*" publishHTML target: [ allowMissing: true, keepAll: true, alwaysLinkToLastBuild: true, -// has to be relative to WORKSPACE :( -reportDir: 'output-general', +// Has to be relative to WORKSPACE +reportDir: "${env.OUTPUT_DIR_RELATIVE}", reportFiles: 'console-report.html', reportName: 'General Nightly Build Report' ] @@ -159,16 +152,16 @@ curl -L -o personality.sh "${env.PROJET_PERSONALITY}" } environment { TESTS = 'mvninstall,compile,javac,unit,htmlout' -OUTPUT_RELATIVE = "${env.OUTPUT_RELATIVE_JDK7}" -OUTPUTDIR = "${env.OUTPUTDIR_JDK7}" +OUTPUT_DIR_RELATIVE = "output-jdk7" +OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE}" // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already. } steps { unstash 'yetus' sh '''#!/usr/bin/env bash - rm -rf "${OUTPUTDIR}" && mkdir "${OUTPUTDIR}" - rm -rf "${OUTPUTDIR}/machine" && mkdir "${OUTPUTDIR}/machine" - "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_RELATIVE}/machine" + rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}" + rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine" + "${BAS
hbase git commit: HBASE-19578 MasterProcWALs cleaning is incorrect
Repository: hbase Updated Branches: refs/heads/branch-2 a82f89bd3 -> bf998077b HBASE-19578 MasterProcWALs cleaning is incorrect Signed-off-by: tedyu Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bf998077 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bf998077 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bf998077 Branch: refs/heads/branch-2 Commit: bf998077b9dafd0df83f650f1366072e065495d4 Parents: a82f89b Author: Peter Somogyi Authored: Thu Dec 21 13:15:05 2017 +0100 Committer: tedyu Committed: Thu Dec 21 09:39:31 2017 -0800 -- .../hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java | 2 +- .../hadoop/hbase/master/procedure/MasterProcedureUtil.java | 2 +- .../org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/bf998077/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java -- diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java index 1e9ef6e..6226350 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java @@ -160,7 +160,7 @@ public class ProcedureWALFile implements Comparable { boolean archived = false; if (walArchiveDir != null) { Path archivedFile = new Path(walArchiveDir, logFile.getName()); - LOG.info("ARCHIVED (TODO: FILES ARE NOT PURGED FROM ARCHIVE!) " + logFile + " to " + archivedFile); + LOG.info("Archiving " + logFile + " to " + archivedFile); if (!fs.rename(logFile, archivedFile)) { LOG.warn("Failed archive of " + logFile + ", deleting"); } else { http://git-wip-us.apache.org/repos/asf/hbase/blob/bf998077/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java index b87f437..4afd711 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java @@ -147,7 +147,7 @@ public final class MasterProcedureUtil { * Pattern used to validate a Procedure WAL file name see * {@link #validateProcedureWALFilename(String)} for description. */ - private static final Pattern pattern = Pattern.compile(".*pv-\\d{20}.log"); + private static final Pattern pattern = Pattern.compile(".*pv2-\\d{20}.log"); /** * A Procedure WAL file name is of the format: pv-.log where wal-id is 20 digits. 
http://git-wip-us.apache.org/repos/asf/hbase/blob/bf998077/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java index 43fc6a4..2d517c4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java @@ -134,7 +134,7 @@ public class TestLogsCleaner { // Case 2: 5 Procedure WALs that are old which would be deleted for (int i = 1; i < 6; i++) { - Path fileName = new Path(oldProcedureWALDir, String.format("pv-%020d.log", i)); + Path fileName = new Path(oldProcedureWALDir, String.format("pv2-%020d.log", i)); fs.createNewFile(fileName); } @@ -155,7 +155,7 @@ public class TestLogsCleaner { // Case 5: 5 Procedure WALs that are new, will stay for (int i = 6; i < 11; i++) { - Path fileName = new Path(oldProcedureWALDir, String.format("pv-%020d.log", i)); + Path fileName = new Path(oldProcedureWALDir, String.format("pv2-%020d.log", i)); fs.createNewFile(fileName); }
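For context on what the one-character pattern change above fixes: procedure WALs are written with a pv2- prefix, so the old pv- pattern in MasterProcedureUtil never matched real files and the cleaner left them behind. A small standalone sketch (not part of the patch, file name format taken from the TestLogsCleaner hunk above) showing the difference:

// Illustrative only: compares the old and corrected MasterProcedureUtil patterns
// against a file name shaped like the ones TestLogsCleaner creates.
import java.util.regex.Pattern;

public class ProcWalNameCheck {
  private static final Pattern OLD = Pattern.compile(".*pv-\\d{20}.log");   // before this commit
  private static final Pattern FIXED = Pattern.compile(".*pv2-\\d{20}.log"); // after this commit

  public static void main(String[] args) {
    String name = String.format("pv2-%020d.log", 3L); // how MasterProcWAL files are actually named
    System.out.println(OLD.matcher(name).matches());   // false: the cleaner skipped the file
    System.out.println(FIXED.matcher(name).matches()); // true: the file is now recognized
  }
}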
hbase git commit: HBASE-19578 MasterProcWALs cleaning is incorrect
Repository: hbase Updated Branches: refs/heads/master 00b0288c9 -> 35728acd2 HBASE-19578 MasterProcWALs cleaning is incorrect Signed-off-by: tedyu Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/35728acd Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/35728acd Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/35728acd Branch: refs/heads/master Commit: 35728acd217dcfa52a2f274a064ba6e847835b9f Parents: 00b0288 Author: Peter Somogyi Authored: Thu Dec 21 13:15:05 2017 +0100 Committer: tedyu Committed: Thu Dec 21 09:38:25 2017 -0800 -- .../hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java | 2 +- .../hadoop/hbase/master/procedure/MasterProcedureUtil.java | 2 +- .../org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/35728acd/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java -- diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java index 1e9ef6e..6226350 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java @@ -160,7 +160,7 @@ public class ProcedureWALFile implements Comparable { boolean archived = false; if (walArchiveDir != null) { Path archivedFile = new Path(walArchiveDir, logFile.getName()); - LOG.info("ARCHIVED (TODO: FILES ARE NOT PURGED FROM ARCHIVE!) " + logFile + " to " + archivedFile); + LOG.info("Archiving " + logFile + " to " + archivedFile); if (!fs.rename(logFile, archivedFile)) { LOG.warn("Failed archive of " + logFile + ", deleting"); } else { http://git-wip-us.apache.org/repos/asf/hbase/blob/35728acd/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java index b87f437..4afd711 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java @@ -147,7 +147,7 @@ public final class MasterProcedureUtil { * Pattern used to validate a Procedure WAL file name see * {@link #validateProcedureWALFilename(String)} for description. */ - private static final Pattern pattern = Pattern.compile(".*pv-\\d{20}.log"); + private static final Pattern pattern = Pattern.compile(".*pv2-\\d{20}.log"); /** * A Procedure WAL file name is of the format: pv-.log where wal-id is 20 digits. 
http://git-wip-us.apache.org/repos/asf/hbase/blob/35728acd/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java index 43fc6a4..2d517c4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java @@ -134,7 +134,7 @@ public class TestLogsCleaner { // Case 2: 5 Procedure WALs that are old which would be deleted for (int i = 1; i < 6; i++) { - Path fileName = new Path(oldProcedureWALDir, String.format("pv-%020d.log", i)); + Path fileName = new Path(oldProcedureWALDir, String.format("pv2-%020d.log", i)); fs.createNewFile(fileName); } @@ -155,7 +155,7 @@ public class TestLogsCleaner { // Case 5: 5 Procedure WALs that are new, will stay for (int i = 6; i < 11; i++) { - Path fileName = new Path(oldProcedureWALDir, String.format("pv-%020d.log", i)); + Path fileName = new Path(oldProcedureWALDir, String.format("pv2-%020d.log", i)); fs.createNewFile(fileName); }
[24/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html index 70481ce..b9f6622 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html @@ -47,1648 +47,1649 @@ 039import java.util.zip.GZIPInputStream; 040import java.util.zip.GZIPOutputStream; 041 -042import org.apache.commons.logging.Log; -043import org.apache.commons.logging.LogFactory; -044import org.apache.yetus.audience.InterfaceAudience; -045 -046/** -047 * Encodes and decodes to and from Base64 notation. -048 * -049 *-050 * Homepage: http://iharder.net/base64;. -051 *
-052 * -053 *-054 * Change Log: -055 *
-107 * -108 *- v2.2.1 - Fixed bug using URL_SAFE and ORDERED encodings. Fixed bug -058 * when using very small files (~< 40 bytes).
-059 *- v2.2 - Added some helper methods for encoding/decoding directly from -060 * one file to the next. Also added a main() method to support command -061 * line encoding/decoding from one file to the next. Also added these -062 * Base64 dialects: -063 *
-072 *- The default is RFC3548 format.
-065 *- Using Base64.URLSAFE generates URL and file name friendly format as -066 * described in Section 4 of RFC3548. -067 * http://www.faqs.org/rfcs/rfc3548.html
; -068 *- Using Base64.ORDERED generates URL and file name friendly format -069 * that preserves lexical ordering as described in -070 * http://www.faqs.org/qa/rfcc-1940.html
; -071 *-073 * Special thanks to Jim Kellerman at ; -074 * http://www.powerset.com/; for contributing the new Base64 dialects. -075 *
- v2.1 - Cleaned up javadoc comments and unused variables and methods. -078 * Added some convenience methods for reading and writing to and from files. -079 *
-080 *- v2.0.2 - Now specifies UTF-8 encoding in places where the code fails on -081 * systems with other encodings (like EBCDIC).
-082 *- v2.0.1 - Fixed an error when decoding a single byte, that is, when the -083 * encoded data was a single byte.
-084 *- v2.0 - I got rid of methods that used booleans to set options. Now -085 * everything is more consolidated and cleaner. The code now detects when -086 * data that's being decoded is gzip-compressed and will decompress it -087 * automatically. Generally things are cleaner. You'll probably have to -088 * change some method calls that you were making to support the new options -089 * format (ints that you "OR" together).
-090 *- v1.5.1 - Fixed bug when decompressing and decoding to a byte[] using -091 * decode( String s, boolean gzipCompressed ). Added the ability to -092 * "suspend" encoding in the Output Stream so you can turn on and off the -093 * encoding if you need to embed base64 data in an otherwise "normal" stream -094 * (like an XML file).
-095 *- v1.5 - Output stream pases on flush() command but doesn't do anything -096 * itself. This helps when using GZIP streams. Added the ability to -097 * GZip-compress objects before encoding them.
-098 *- v1.4 - Added helper methods to read/write files.
-099 *- v1.3.6 - Fixed OutputStream.flush() so that 'position' is reset.
-100 *- v1.3.5 - Added flag to turn on and off line breaks. Fixed bug in input -101 * stream where last buffer being read, if not completely full, was not -102 * returned.
-103 *- v1.3.4 - Fixed when "improperly padded stream" error was thrown at the -104 * wrong time.
-105 *- v1.3.3 - Fixed I/O streams which were totally messed up.
-106 *-109 * I am placing this code in the Public Domain. Do with it as you will. This -110 * software comes with no guarantees or warranties but with plenty of -111 * well-wishing instead! -112 *
-113 * Please visit http://iharder.net/base64; -114 * periodically to check for updates or to contribute improvements. -115 *
-116 * author: Robert Harder, r...@iharder.net -117 *
-118 * version: 2.2.1 -119 */ -120@InterfaceAudience.Public -121public class Base64 { -122 -123 /* P U B L I C F I E L D S */ -124 -125 /** No options specified. Value is zero. */ -126 public final static int NO_OPTIONS = 0; -127 -128 /** Specify encoding. */ -129 public final static int ENCODE = 1; -130 -131 /** Specify decoding. */ -
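The change log above belongs to the bundled iharder Base64 utility whose generated page is being republished in this commit. For orientation, a minimal usage sketch; encodeBytes and decode are the method names I recall from this class (v2.2.1 lineage) and should be verified against the actual API:

// Hedged sketch, not from the HBase sources: round-trips a string through the
// org.apache.hadoop.hbase.util.Base64 helper documented above.
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

public class Base64Example {
  public static void main(String[] args) {
    byte[] raw = Bytes.toBytes("hello hbase");
    String encoded = Base64.encodeBytes(raw);      // plain encoding, NO_OPTIONS behaviour
    byte[] roundTrip = Base64.decode(encoded);     // per the javadoc, gzip content is detected automatically
    System.out.println(Bytes.toString(roundTrip)); // prints "hello hbase"
  }
}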
[43/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html b/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html index b0b983c..d560922 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html @@ -33,1024 +33,1023 @@ 025import java.util.List; 026import java.util.stream.Collectors; 027 -028import org.apache.commons.logging.Log; -029import org.apache.commons.logging.LogFactory; -030import org.apache.hadoop.conf.Configuration; -031import org.apache.hadoop.hbase.KeyValue.KVComparator; -032import org.apache.hadoop.hbase.client.RegionInfo; -033import org.apache.hadoop.hbase.client.RegionInfoBuilder; -034import org.apache.hadoop.hbase.client.RegionInfoDisplay; -035import org.apache.hadoop.hbase.exceptions.DeserializationException; -036import org.apache.hadoop.hbase.master.RegionState; -037import org.apache.hadoop.hbase.util.Bytes; -038import org.apache.hadoop.io.DataInputBuffer; -039import org.apache.yetus.audience.InterfaceAudience; -040 -041import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -042import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; -043 -044/** -045 * Information about a region. A region is a range of keys in the whole keyspace of a table, an -046 * identifier (a timestamp) for differentiating between subset ranges (after region split) -047 * and a replicaId for differentiating the instance for the same range and some status information -048 * about the region. -049 * -050 * The region has a unique name which consists of the following fields: -051 *-052 *
-059 * -060 *- tableName : The name of the table
-053 *- startKey: The startKey for the region.
-054 *- regionId: A timestamp when the region is created.
-055 *- replicaId : An id starting from 0 to differentiate replicas of the same region range -056 * but hosted in separated servers. The same region range can be hosted in multiple locations.
-057 *- encodedName : An MD5 encoded string for the region name.
-058 *
Other than the fields in the region name, region info contains: -061 *-062 *
-066 * -067 * In 0.98 or before, a list of table's regions would fully cover the total keyspace, and at any -068 * point in time, a row key always belongs to a single region, which is hosted in a single server. -069 * In 0.99+, a region can have multiple instances (called replicas), and thus a range (or row) can -070 * correspond to multiple HRegionInfo's. These HRI's share the same fields however except the -071 * replicaId field. If the replicaId is not set, it defaults to 0, which is compatible with the -072 * previous behavior of a range corresponding to 1 region. -073 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. -074 * use {@link RegionInfoBuilder} to build {@link RegionInfo}. -075 */ -076@Deprecated -077@InterfaceAudience.Public -078public class HRegionInfo implements RegionInfo, Comparable- endKey : the endKey for the region (exclusive)
-063 *- split : Whether the region is split
-064 *- offline : Whether the region is offline
-065 *{ -079 private static final Log LOG = LogFactory.getLog(HRegionInfo.class); -080 -081 /** -082 * The new format for a region name contains its encodedName at the end. -083 * The encoded name also serves as the directory name for the region -084 * in the filesystem. -085 * -086 * New region name format: -087 * <tablename>,,<startkey>,<regionIdTimestamp>.<encodedName>. -088 * where, -089 *<encodedName> is a hex version of the MD5 hash of -090 * <tablename>,<startkey>,<regionIdTimestamp> -091 * -092 * The old region name format: -093 * <tablename>,<startkey>,<regionIdTimestamp> -094 * For region names in the old format, the encoded name is a 32-bit -095 * JenkinsHash integer value (in its decimal notation, string form). -096 * -097 * **NOTE** -098 * -099 * The first hbase:meta region, and regions created by an older -100 * version of HBase (0.20 or prior) will continue to use the -101 * old region name format. -102 */ -103 -104 /** A non-capture group so that this can be embedded. */ -105 public static final String ENCODED_REGION_NAME_REGEX = RegionInfoBuilder.ENCODED_REGION_NAME_REGEX; -106 -107 private static final int MAX_REPLICA_ID = 0x; -108 -109 /** -110 * @param regionName -111 * @return the encodedName -112 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0 -113 * Use {@link org.apache.hadoop.hbase.client.RegionInfo#encodeRegionName(
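The deprecation note in the regenerated page above points users at RegionInfoBuilder. A rough sketch of that replacement API follows; the builder method names are assumptions based on the 2.0 client javadoc, not something this commit ships:

// Hedged sketch: building a RegionInfo through RegionInfoBuilder instead of HRegionInfo.
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionInfoExample {
  public static void main(String[] args) {
    RegionInfo ri = RegionInfoBuilder.newBuilder(TableName.valueOf("my_table"))
        .setStartKey(Bytes.toBytes("a"))          // startKey component of the region name
        .setEndKey(Bytes.toBytes("m"))            // endKey is exclusive, as the javadoc says
        .setRegionId(System.currentTimeMillis())  // the timestamp component
        .build();
    System.out.println(ri.getRegionNameAsString()); // table,startkey,regionId.encodedName.
    System.out.println(ri.getEncodedName());        // MD5-based encoded name
  }
}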
[40/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html index 7ab77ff..3a23125 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html @@ -43,634 +43,633 @@ 035import java.util.concurrent.atomic.AtomicInteger; 036import java.util.concurrent.atomic.AtomicLong; 037 -038import org.apache.commons.logging.Log; -039import org.apache.commons.logging.LogFactory; -040import org.apache.hadoop.conf.Configuration; -041import org.apache.hadoop.hbase.HBaseConfiguration; -042import org.apache.hadoop.hbase.HConstants; -043import org.apache.hadoop.hbase.HRegionLocation; -044import org.apache.hadoop.hbase.ServerName; -045import org.apache.hadoop.hbase.TableName; -046import org.apache.hadoop.hbase.ipc.RpcControllerFactory; -047import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -048import org.apache.yetus.audience.InterfaceAudience; -049 -050import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -051import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; -052 -053/** -054 * HTableMultiplexer provides a thread-safe non blocking PUT API across all the tables. -055 * Each put will be sharded into different buffer queues based on its destination region server. -056 * So each region server buffer queue will only have the puts which share the same destination. -057 * And each queue will have a flush worker thread to flush the puts request to the region server. -058 * If any queue is full, the HTableMultiplexer starts to drop the Put requests for that -059 * particular queue. -060 * -061 * Also all the puts will be retried as a configuration number before dropping. -062 * And the HTableMultiplexer can report the number of buffered requests and the number of the -063 * failed (dropped) requests in total or on per region server basis. -064 * -065 * This class is thread safe. -066 */ -067@InterfaceAudience.Public -068public class HTableMultiplexer { -069 private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName()); -070 -071 public static final String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS = -072 "hbase.tablemultiplexer.flush.period.ms"; -073 public static final String TABLE_MULTIPLEXER_INIT_THREADS = "hbase.tablemultiplexer.init.threads"; -074 public static final String TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE = -075 "hbase.client.max.retries.in.queue"; -076 -077 /** The map between each region server to its flush worker */ -078 private final MapserverToFlushWorkerMap = -079 new ConcurrentHashMap<>(); -080 -081 private final Configuration workerConf; -082 private final ClusterConnection conn; -083 private final ExecutorService pool; -084 private final int maxAttempts; -085 private final int perRegionServerBufferQueueSize; -086 private final int maxKeyValueSize; -087 private final ScheduledExecutorService executor; -088 private final long flushPeriod; -089 -090 /** -091 * @param conf The HBaseConfiguration -092 * @param perRegionServerBufferQueueSize determines the max number of the buffered Put ops for -093 * each region server before dropping the request. 
-094 */ -095 public HTableMultiplexer(Configuration conf, int perRegionServerBufferQueueSize) -096 throws IOException { -097 this(ConnectionFactory.createConnection(conf), conf, perRegionServerBufferQueueSize); -098 } -099 -100 /** -101 * @param conn The HBase connection. -102 * @param conf The HBase configuration -103 * @param perRegionServerBufferQueueSize determines the max number of the buffered Put ops for -104 * each region server before dropping the request. -105 */ -106 public HTableMultiplexer(Connection conn, Configuration conf, -107 int perRegionServerBufferQueueSize) { -108this.conn = (ClusterConnection) conn; -109this.pool = HTable.getDefaultExecutor(conf); -110// how many times we could try in total, one more than retry number -111this.maxAttempts = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, -112 HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) + 1; -113this.perRegionServerBufferQueueSize = perRegionServerBufferQueueSize; -114this.maxKeyValueSize = HTable.getMaxKeyValueSize(conf); -115this.flushPeriod = conf.getLong(TABLE_MULTIPLEXER_FLUSH_PERIOD_MS, 100); -116int initThreads = conf.getInt(TABLE_MULTIPLEXER_INIT_THREADS, 10); -117this.executor = -118 Executors.newScheduledThreadPool(initThreads, -119 new ThreadFactoryBuilder().setDaem
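Since the page above documents HTableMultiplexer's per-regionserver buffering, here is a hedged usage sketch. The two-argument constructor comes from the diff itself; the put() signature returning false when the destination queue is full is how I understand the class, and should be double-checked:

// Hedged sketch: queueing a non-blocking put through HTableMultiplexer.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTableMultiplexer;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiplexerExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Second argument: max buffered puts per regionserver before requests are dropped.
    HTableMultiplexer mux = new HTableMultiplexer(conf, 1000);
    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    boolean queued = mux.put(TableName.valueOf("my_table"), put);
    if (!queued) {
      // The queue for the destination regionserver was full; the put was not buffered.
      System.err.println("put dropped, back off and retry");
    }
  }
}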
[04/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html index 9078098..eb1d1fe 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html +++ b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab"; -public static class ConnectionUtils.ShortCircuitingClusterConnection +public static class ConnectionUtils.ShortCircuitingClusterConnection extends ConnectionImplementation A ClusterConnection that will short-circuit RPC making direct invocations against the localhost if the invocation target is 'this' server; save on network and protobuf @@ -287,7 +287,7 @@ extends serverName -private final ServerName serverName +private final ServerName serverName @@ -296,7 +296,7 @@ extends localHostAdmin -private final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface localHostAdmin +private final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface localHostAdmin @@ -305,7 +305,7 @@ extends localHostClient -private final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface localHostClient +private final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface localHostClient @@ -322,7 +322,7 @@ extends ShortCircuitingClusterConnection -private ShortCircuitingClusterConnection(org.apache.hadoop.conf.Configuration conf, +private ShortCircuitingClusterConnection(org.apache.hadoop.conf.Configuration conf, http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true"; title="class or interface in java.util.concurrent">ExecutorService pool, User user, ServerName serverName, @@ -349,7 +349,7 @@ extends getAdmin -public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface getAdmin(ServerName sn) +public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface getAdmin(ServerName sn) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Description copied from interface: ClusterConnection Establishes a connection to the region server at the specified address. 
@@ -371,7 +371,7 @@ extends getClient -public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface getClient(ServerName sn) +public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface getClient(ServerName sn) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Description copied from interface: ClusterConnection Establishes a connection to the region server at the specified address, and returns @@ -394,7 +394,7 @@ extends getKeepAliveMasterService -public MasterKeepAliveConnection getKeepAliveMasterService() +public MasterKeepAliveConnection getKeepAliveMasterService() throws MasterNotRunningException Description copied from interface: ClusterConnection This function allows HBaseAdmin and potentially others to get a shared MasterService http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.html b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.html index 0468b33..d77d8ec 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.html +++ b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public final class ConnectionUtils +public final class ConnectionUtils extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object Utility used by client c
[38/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html index 84b747e..fe5233e 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html @@ -31,400 +31,399 @@ 023import java.util.Arrays; 024import java.util.regex.Pattern; 025 -026import org.apache.commons.logging.Log; -027import org.apache.commons.logging.LogFactory; -028import org.apache.yetus.audience.InterfaceAudience; -029import org.apache.hadoop.hbase.exceptions.DeserializationException; -030import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; -031import org.apache.hadoop.hbase.util.Bytes; -032import org.jcodings.Encoding; -033import org.jcodings.EncodingDB; -034import org.jcodings.specific.UTF8Encoding; -035import org.joni.Matcher; -036import org.joni.Option; -037import org.joni.Regex; -038import org.joni.Syntax; -039 -040import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; -041 -042/** -043 * This comparator is for use with {@link CompareFilter} implementations, such -044 * as {@link RowFilter}, {@link QualifierFilter}, and {@link ValueFilter}, for -045 * filtering based on the value of a given column. Use it to test if a given -046 * regular expression matches a cell value in the column. -047 *-048 * Only EQUAL or NOT_EQUAL comparisons are valid with this comparator. -049 *
-050 * For example: -051 *
-052 *
-053 * ValueFilter vf = new ValueFilter(CompareOp.EQUAL, -054 * new RegexStringComparator( -055 * // v4 IP address -056 * "(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3,3}" + -057 * "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))(\\/[0-9]+)?" + -058 * "|" + -059 * // v6 IP address -060 * "((([\\dA-Fa-f]{1,4}:){7}[\\dA-Fa-f]{1,4})(:([\\d]{1,3}.)" + -061 * "{3}[\\d]{1,3})?)(\\/[0-9]+)?")); -062 *-063 *-064 * Supports {@link java.util.regex.Pattern} flags as well: -065 *
-066 *
-067 * ValueFilter vf = new ValueFilter(CompareOp.EQUAL, -068 * new RegexStringComparator("regex", Pattern.CASE_INSENSITIVE | Pattern.DOTALL)); -069 *-070 * @see java.util.regex.Pattern -071 */ -072@InterfaceAudience.Public -073public class RegexStringComparator extends ByteArrayComparable { -074 -075 private static final Log LOG = LogFactory.getLog(RegexStringComparator.class); -076 -077 private Engine engine; -078 -079 /** Engine implementation type (default=JAVA) */ -080 @InterfaceAudience.Public -081 public enum EngineType { -082JAVA, -083JONI -084 } -085 -086 /** -087 * Constructor -088 * Adds Pattern.DOTALL to the underlying Pattern -089 * @param expr a valid regular expression -090 */ -091 public RegexStringComparator(String expr) { -092this(expr, Pattern.DOTALL); -093 } -094 -095 /** -096 * Constructor -097 * Adds Pattern.DOTALL to the underlying Pattern -098 * @param expr a valid regular expression -099 * @param engine engine implementation type -100 */ -101 public RegexStringComparator(String expr, EngineType engine) { -102this(expr, Pattern.DOTALL, engine); -103 } -104 -105 /** -106 * Constructor -107 * @param expr a valid regular expression -108 * @param flags java.util.regex.Pattern flags -109 */ -110 public RegexStringComparator(String expr, int flags) { -111this(expr, flags, EngineType.JAVA); -112 } -113 -114 /** -115 * Constructor -116 * @param expr a valid regular expression -117 * @param flags java.util.regex.Pattern flags -118 * @param engine engine implementation type -119 */ -120 public RegexStringComparator(String expr, int flags, EngineType engine) { -121super(Bytes.toBytes(expr)); -122switch (engine) { -123 case JAVA: -124this.engine = new JavaRegexEngine(expr, flags); -125break; -126 case JONI: -127this.engine = new JoniRegexEngine(expr, flags); -128break; -129} -130 } -131 -132 /** -133 * Specifies the {@link Charset} to use to convert the row key to a String. -134 *-135 * The row key needs to be converted to a String in order to be matched -136 * against the regular expression. This method controls which charset is -137 * used to do this conversion. -138 *
-139 * If the row key is made of arbitrary bytes, the charset {@code ISO-8859-1} -140 * is recommended. -141 * @param charset The charset to use. -142 */ -143 public void setCharset(final Charset charset) { -144engine.setCharset(charset.name()); -145 }
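Expanding the javadoc sample shown in the page above, a hedged sketch of wiring RegexStringComparator into a Scan; the CompareOperator-based ValueFilter constructor is assumed from the 2.0 client API, the JONI engine and setCharset calls mirror the source shown:

// Hedged sketch: regex filtering of cell values on a Scan.
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;

public class RegexFilterExample {
  public static void main(String[] args) {
    RegexStringComparator comparator =
        new RegexStringComparator("^user-\\d+$", RegexStringComparator.EngineType.JONI);
    comparator.setCharset(StandardCharsets.ISO_8859_1); // recommended when values are arbitrary bytes
    Scan scan = new Scan();
    // Only EQUAL or NOT_EQUAL comparisons are valid with this comparator.
    scan.setFilter(new ValueFilter(CompareOperator.EQUAL, comparator));
    // The scan can now be handed to Table.getScanner(scan).
  }
}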
[42/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html b/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html index 8fdb647..af9da12 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html @@ -32,9 +32,9 @@ 024import java.util.Collections; 025import java.util.List; 026 -027import org.apache.commons.logging.Log; -028import org.apache.commons.logging.LogFactory; -029import org.apache.yetus.audience.InterfaceAudience; +027import org.apache.yetus.audience.InterfaceAudience; +028import org.slf4j.Logger; +029import org.slf4j.LoggerFactory; 030import org.apache.hadoop.conf.Configuration; 031import org.apache.hadoop.hbase.client.Admin; 032import org.apache.hadoop.hbase.client.Connection; @@ -66,7 +66,7 @@ 058 */ 059@InterfaceAudience.Public 060public class LocalHBaseCluster { -061 private static final Log LOG = LogFactory.getLog(LocalHBaseCluster.class); +061 private static final Logger LOG = LoggerFactory.getLogger(LocalHBaseCluster.class); 062 private final ListmasterThreads = new CopyOnWriteArrayList<>(); 063 private final List regionThreads = new CopyOnWriteArrayList<>(); 064 private final static int DEFAULT_NO = 1; http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/ScheduledChore.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ScheduledChore.html b/apidocs/src-html/org/apache/hadoop/hbase/ScheduledChore.html index b7dce34..255e6d1 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/ScheduledChore.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/ScheduledChore.html @@ -29,352 +29,351 @@ 021import java.util.concurrent.ScheduledThreadPoolExecutor; 022import java.util.concurrent.TimeUnit; 023 -024import org.apache.commons.logging.Log; -025import org.apache.commons.logging.LogFactory; -026import org.apache.yetus.audience.InterfaceAudience; -027 -028import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -029 -030/** -031 * ScheduledChore is a task performed on a period in hbase. ScheduledChores become active once -032 * scheduled with a {@link ChoreService} via {@link ChoreService#scheduleChore(ScheduledChore)}. The -033 * chore is run in a {@link ScheduledThreadPoolExecutor} and competes with other ScheduledChores for -034 * access to the threads in the core thread pool. If an unhandled exception occurs, the chore -035 * cancellation is logged. Implementers should consider whether or not the Chore will be able to -036 * execute within the defined period. It is bad practice to define a ScheduledChore whose execution -037 * time exceeds its period since it will try to hog one of the threads in the {@link ChoreService}'s -038 * thread pool. -039 * -040 * Don't subclass ScheduledChore if the task relies on being woken up for something to do, such as -041 * an entry being added to a queue, etc. 
-042 */ -043@InterfaceAudience.Public -044public abstract class ScheduledChore implements Runnable { -045 private static final Log LOG = LogFactory.getLog(ScheduledChore.class); -046 -047 private final String name; -048 -049 /** -050 * Default values for scheduling parameters should they be excluded during construction -051 */ -052 private final static TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS; -053 private final static long DEFAULT_INITIAL_DELAY = 0; -054 -055 /** -056 * Scheduling parameters. Used by ChoreService when scheduling the chore to run periodically -057 */ -058 private final int period; // in TimeUnit units -059 private final TimeUnit timeUnit; -060 private final long initialDelay; // in TimeUnit units -061 -062 /** -063 * Interface to the ChoreService that this ScheduledChore is scheduled with. null if the chore is -064 * not scheduled. -065 */ -066 private ChoreServicer choreServicer; -067 -068 /** -069 * Variables that encapsulate the meaningful state information -070 */ -071 private long timeOfLastRun = -1; // system time millis -072 private long timeOfThisRun = -1; // system time millis -073 private boolean initialChoreComplete = false; -074 -075 /** -076 * A means by which a ScheduledChore can be stopped. Once a chore recognizes that it has been -077 * stopped, it will cancel itself. This is particularly useful in the case where a single stopper -078 * instance is given to multiple chores. In such a case, a single {@link Stoppable#stop(String)} -079 * command can cause many chores to stop together. -080 */ -081 private final Stoppable stop
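The ScheduledChore javadoc above describes the periodic-task pattern used throughout HBase. A hedged sketch of subclassing it and scheduling with a ChoreService; the constructor shapes are assumptions to verify against the actual class, the key point being the overridden chore():

// Hedged sketch: a chore that runs every 10 seconds until its Stoppable is stopped.
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;

public class ChoreExample {
  static class HeartbeatChore extends ScheduledChore {
    HeartbeatChore(Stoppable stopper) {
      // Period of 10s; keep chore() well under the period, as the javadoc warns.
      super("heartbeat-chore", stopper, 10_000);
    }
    @Override
    protected void chore() {
      System.out.println("periodic work goes here");
    }
  }

  public static void main(String[] args) {
    Stoppable stopper = new Stoppable() {
      private volatile boolean stopped;
      @Override public void stop(String why) { stopped = true; }
      @Override public boolean isStopped() { return stopped; }
    };
    ChoreService service = new ChoreService("example");
    service.scheduleChore(new HeartbeatChore(stopper));
  }
}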
[37/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.html index 84b747e..fe5233e 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/RegexStringComparator.html @@ -31,400 +31,399 @@ 023import java.util.Arrays; 024import java.util.regex.Pattern; 025 -026import org.apache.commons.logging.Log; -027import org.apache.commons.logging.LogFactory; -028import org.apache.yetus.audience.InterfaceAudience; -029import org.apache.hadoop.hbase.exceptions.DeserializationException; -030import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; -031import org.apache.hadoop.hbase.util.Bytes; -032import org.jcodings.Encoding; -033import org.jcodings.EncodingDB; -034import org.jcodings.specific.UTF8Encoding; -035import org.joni.Matcher; -036import org.joni.Option; -037import org.joni.Regex; -038import org.joni.Syntax; -039 -040import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; -041 -042/** -043 * This comparator is for use with {@link CompareFilter} implementations, such -044 * as {@link RowFilter}, {@link QualifierFilter}, and {@link ValueFilter}, for -045 * filtering based on the value of a given column. Use it to test if a given -046 * regular expression matches a cell value in the column. -047 *-048 * Only EQUAL or NOT_EQUAL comparisons are valid with this comparator. -049 *
-050 * For example: -051 *
-052 *
-053 * ValueFilter vf = new ValueFilter(CompareOp.EQUAL, -054 * new RegexStringComparator( -055 * // v4 IP address -056 * "(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3,3}" + -057 * "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))(\\/[0-9]+)?" + -058 * "|" + -059 * // v6 IP address -060 * "((([\\dA-Fa-f]{1,4}:){7}[\\dA-Fa-f]{1,4})(:([\\d]{1,3}.)" + -061 * "{3}[\\d]{1,3})?)(\\/[0-9]+)?")); -062 *-063 *-064 * Supports {@link java.util.regex.Pattern} flags as well: -065 *
-066 *
-067 * ValueFilter vf = new ValueFilter(CompareOp.EQUAL, -068 * new RegexStringComparator("regex", Pattern.CASE_INSENSITIVE | Pattern.DOTALL)); -069 *-070 * @see java.util.regex.Pattern -071 */ -072@InterfaceAudience.Public -073public class RegexStringComparator extends ByteArrayComparable { -074 -075 private static final Log LOG = LogFactory.getLog(RegexStringComparator.class); -076 -077 private Engine engine; -078 -079 /** Engine implementation type (default=JAVA) */ -080 @InterfaceAudience.Public -081 public enum EngineType { -082JAVA, -083JONI -084 } -085 -086 /** -087 * Constructor -088 * Adds Pattern.DOTALL to the underlying Pattern -089 * @param expr a valid regular expression -090 */ -091 public RegexStringComparator(String expr) { -092this(expr, Pattern.DOTALL); -093 } -094 -095 /** -096 * Constructor -097 * Adds Pattern.DOTALL to the underlying Pattern -098 * @param expr a valid regular expression -099 * @param engine engine implementation type -100 */ -101 public RegexStringComparator(String expr, EngineType engine) { -102this(expr, Pattern.DOTALL, engine); -103 } -104 -105 /** -106 * Constructor -107 * @param expr a valid regular expression -108 * @param flags java.util.regex.Pattern flags -109 */ -110 public RegexStringComparator(String expr, int flags) { -111this(expr, flags, EngineType.JAVA); -112 } -113 -114 /** -115 * Constructor -116 * @param expr a valid regular expression -117 * @param flags java.util.regex.Pattern flags -118 * @param engine engine implementation type -119 */ -120 public RegexStringComparator(String expr, int flags, EngineType engine) { -121super(Bytes.toBytes(expr)); -122switch (engine) { -123 case JAVA: -124this.engine = new JavaRegexEngine(expr, flags); -125break; -126 case JONI: -127this.engine = new JoniRegexEngine(expr, flags); -128break; -129} -130 } -131 -132 /** -133 * Specifies the {@link Charset} to use to convert the row key to a String. -134 *-135 * The row key needs to be converted to a String in order to be matched -136 * against the regular expression. This method controls which charset is -137 * used to do this conversion. -138 *
-139 * If the row key is made of arbitrary bytes, the charset {@code ISO-8859-1} -140 * is recommended. -141 * @param charset The charset to use. -142 */ -143 public void setCharset(final Charset charset) { -144engine.setCharset(charset.name()); -145 } -146 -147 @Override -148 public int compareTo(byte[] va
hbase-site git commit: INFRA-10751 Empty commit
Repository: hbase-site Updated Branches: refs/heads/asf-site 505bbb2e1 -> 4f18f8a6e INFRA-10751 Empty commit Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/4f18f8a6 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/4f18f8a6 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/4f18f8a6 Branch: refs/heads/asf-site Commit: 4f18f8a6ea627c92621a837084c4cd711ad4a3b0 Parents: 505bbb2 Author: jenkins Authored: Thu Dec 21 15:20:01 2017 + Committer: jenkins Committed: Thu Dec 21 15:20:01 2017 + -- --
[39/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.ReadType.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.ReadType.html b/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.ReadType.html index ffeadbf..935839d 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.ReadType.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.ReadType.html @@ -37,10 +37,10 @@ 029import java.util.TreeMap; 030import java.util.TreeSet; 031 -032import org.apache.commons.logging.Log; -033import org.apache.commons.logging.LogFactory; -034import org.apache.hadoop.hbase.HConstants; -035import org.apache.yetus.audience.InterfaceAudience; +032import org.apache.hadoop.hbase.HConstants; +033import org.apache.yetus.audience.InterfaceAudience; +034import org.slf4j.Logger; +035import org.slf4j.LoggerFactory; 036import org.apache.hadoop.hbase.client.metrics.ScanMetrics; 037import org.apache.hadoop.hbase.filter.Filter; 038import org.apache.hadoop.hbase.filter.IncompatibleFilterException; @@ -95,7 +95,7 @@ 087 */ 088@InterfaceAudience.Public 089public class Scan extends Query { -090 private static final Log LOG = LogFactory.getLog(Scan.class); +090 private static final Logger LOG = LoggerFactory.getLogger(Scan.class); 091 092 private static final String RAW_ATTR = "_raw_"; 093 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html b/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html index ffeadbf..935839d 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html @@ -37,10 +37,10 @@ 029import java.util.TreeMap; 030import java.util.TreeSet; 031 -032import org.apache.commons.logging.Log; -033import org.apache.commons.logging.LogFactory; -034import org.apache.hadoop.hbase.HConstants; -035import org.apache.yetus.audience.InterfaceAudience; +032import org.apache.hadoop.hbase.HConstants; +033import org.apache.yetus.audience.InterfaceAudience; +034import org.slf4j.Logger; +035import org.slf4j.LoggerFactory; 036import org.apache.hadoop.hbase.client.metrics.ScanMetrics; 037import org.apache.hadoop.hbase.filter.Filter; 038import org.apache.hadoop.hbase.filter.IncompatibleFilterException; @@ -95,7 +95,7 @@ 087 */ 088@InterfaceAudience.Public 089public class Scan extends Query { -090 private static final Log LOG = LogFactory.getLog(Scan.class); +090 private static final Logger LOG = LoggerFactory.getLogger(Scan.class); 091 092 private static final String RAW_ATTR = "_raw_"; 093 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html index 0e05116..a4a6614 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html @@ -42,25 +42,25 @@ 034import java.util.regex.Matcher; 035import java.util.stream.Stream; 036 -037import org.apache.commons.logging.Log; -038import org.apache.commons.logging.LogFactory; -039import org.apache.hadoop.fs.Path; -040import org.apache.hadoop.hbase.Coprocessor; -041import org.apache.hadoop.hbase.HConstants; -042import 
org.apache.hadoop.hbase.TableName; -043import org.apache.hadoop.hbase.exceptions.DeserializationException; -044import org.apache.hadoop.hbase.security.User; -045import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -046import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; -047import org.apache.hadoop.hbase.util.Bytes; -048import org.apache.yetus.audience.InterfaceAudience; +037import org.apache.hadoop.fs.Path; +038import org.apache.hadoop.hbase.Coprocessor; +039import org.apache.hadoop.hbase.HConstants; +040import org.apache.hadoop.hbase.TableName; +041import org.apache.hadoop.hbase.exceptions.DeserializationException; +042import org.apache.hadoop.hbase.security.User; +043import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +044import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; +045import org.apache.hadoop.hbase.util.Bytes; +046import org.apache.yetus.audience.InterfaceAudience; +047import org.slf4j.Logger; +048import org.slf4j.LoggerFactory; 049 050/** 051 * @since 2.0.0 052 */ 053@InterfaceAudience.Public 054public class TableDescriptorBuilder { -055 public static final Log LOG = LogFac
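The regenerated pages in this commit reflect HBase's switch from commons-logging to slf4j. The before/after declaration pattern, lifted from the Scan diff above, in a minimal standalone form:

// The old commons-logging form is kept as a comment for comparison.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingStyle {
  // old: private static final Log LOG = LogFactory.getLog(Scan.class);
  private static final Logger LOG = LoggerFactory.getLogger(LoggingStyle.class);

  public static void main(String[] args) {
    LOG.info("slf4j logger obtained where LogFactory.getLog() used to be called");
  }
}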
[16/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/dependency-info.html -- diff --git a/dependency-info.html b/dependency-info.html index 209ea2d..11c5b71 100644 --- a/dependency-info.html +++ b/dependency-info.html @@ -7,7 +7,7 @@ - + Apache HBase – Dependency Information @@ -318,7 +318,7 @@ https://www.apache.org/";>The Apache Software Foundation. All rights reserved. - Last Published: 2017-12-20 + Last Published: 2017-12-21 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/dependency-management.html -- diff --git a/dependency-management.html b/dependency-management.html index e88f31c..6aadd88 100644 --- a/dependency-management.html +++ b/dependency-management.html @@ -7,7 +7,7 @@ - + Apache HBase – Project Dependency Management @@ -335,501 +335,501 @@ jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 -commons-logging -http://commons.apache.org/proper/commons-logging/";>commons-logging -1.2 -jar -http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software License, Version 2.0 - io.dropwizard.metrics http://metrics.codahale.com/metrics-core/";>metrics-core 3.2.1 jar http://www.apache.org/licenses/LICENSE-2.0.html";>Apache License 2.0 - + javax.servlet http://servlet-spec.java.net";>javax.servlet-api 3.1.0 jar https://glassfish.dev.java.net/nonav/public/CDDL+GPL.html";>CDDL + GPLv2 with classpath exception - + javax.ws.rs http://jax-rs-spec.java.net";>javax.ws.rs-api 2.0.1 jar http://glassfish.java.net/public/CDDL+GPL_1_1.html";>CDDL 1.1, http://glassfish.java.net/public/CDDL+GPL_1_1.html";>GPL2 w/ CPE - + javax.xml.bind http://jaxb.java.net/";>jaxb-api 2.2.12 jar https://glassfish.java.net/public/CDDL+GPL_1_1.html";>CDDL 1.1, https://glassfish.java.net/public/CDDL+GPL_1_1.html";>GPL2 w/ CPE - + junit http://junit.org";>junit 4.12 jar http://www.eclipse.org/legal/epl-v10.html";>Eclipse Public License 1.0 - + log4j http://logging.apache.org/log4j/1.2/";>log4j 1.2.17 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software License, Version 2.0 - + net.spy http://www.couchbase.org/code/couchbase/java";>spymemcached 2.12.2 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software License, Version 2.0 - + org.apache.avro http://avro.apache.org";>avro 1.7.7 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software License, Version 2.0 - + org.apache.commons http://commons.apache.org/proper/commons-collections/";>commons-collections4 4.1 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.commons http://commons.apache.org/proper/commons-crypto/";>commons-crypto 1.0.0 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.commons http://commons.apache.org/proper/commons-lang/";>commons-lang3 3.6 jar https://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.commons http://commons.apache.org/proper/commons-math/";>commons-math3 3.6.1 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.curator http://curator.apache.org/curator-client";>curator-client 4.0.0 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software License, Version 2.0 - + org.apache.curator http://curator.apache.org/curator-framework";>curator-framework 4.0.0 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software License, Version 2.0 - + org.apache.curator http://curator.apache.org/curator-recipes";>curator-recipes 4.0.0 jar 
http://www.apache.org/licenses/LICENSE-2.0.txt";>The Apache Software License, Version 2.0 - + org.apache.hadoop hadoop-auth 2.7.4 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.hadoop hadoop-client 2.7.4 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.hadoop hadoop-common 2.7.4 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.hadoop hadoop-hdfs 2.7.4 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.hadoop hadoop-mapreduce-client-core 2.7.4 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.hadoop hadoop-mapreduce-client-jobclient 2.7.4 jar http://www.apache.org/licenses/LICENSE-2.0.txt";>Apache License, Version 2.0 - + org.apache.hadoop hadoop-minicluster 2.7.4 jar http://www.apache.org/licenses/LICENSE-2.0.txt"
[47/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html -- diff --git a/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html b/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html index 0f32c7f..0dc5d45 100644 --- a/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html +++ b/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class RemoteHTable +public class RemoteHTable extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements Table HTable interface to remote tables accessed via REST gateway @@ -611,7 +611,7 @@ implements RemoteHTable -public RemoteHTable(Client client, +public RemoteHTable(Client client, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String name) Constructor @@ -622,7 +622,7 @@ implements RemoteHTable -public RemoteHTable(Client client, +public RemoteHTable(Client client, org.apache.hadoop.conf.Configuration conf, http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String name) Constructor @@ -634,7 +634,7 @@ implements RemoteHTable -public RemoteHTable(Client client, +public RemoteHTable(Client client, org.apache.hadoop.conf.Configuration conf, byte[] name) Constructor @@ -654,7 +654,7 @@ implements buildRowSpec -protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String buildRowSpec(byte[] row, +protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String buildRowSpec(byte[] row, http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true"; title="class or interface in java.util">Map familyMap, long startTime, long endTime, @@ -667,7 +667,7 @@ implements buildMultiRowSpec -protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String buildMultiRowSpec(byte[][] rows, +protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String buildMultiRowSpec(byte[][] rows, int maxVersions) @@ -677,7 +677,7 @@ implements buildResultFromModel -protected Result[] buildResultFromModel(org.apache.hadoop.hbase.rest.model.CellSetModel model) +protected Result[] buildResultFromModel(org.apache.hadoop.hbase.rest.model.CellSetModel model) @@ -686,7 +686,7 @@ implements buildModelFromPut -protected org.apache.hadoop.hbase.rest.model.CellSetModel buildModelFromPut(Put put) +protected org.apache.hadoop.hbase.rest.model.CellSetModel buildModelFromPut(Put put) @@ -695,7 +695,7 @@ implements getTableName -public byte[] getTableName() +public byte[] getTableName() @@ -704,7 +704,7 @@ implements getName -public TableName getName() +public TableName getName() Description copied from interface: Table Gets the fully qualified table name instance of this table. @@ -719,7 +719,7 @@ implements getConfiguration -public org.apache.hadoop.conf.Configuration getConfiguration() +public org.apache.hadoop.conf.Configuration getConfiguration() Description copied from interface: Table Returns the Configuration object used by this instance. 
@@ -738,7 +738,7 @@ implements getTableDescriptor http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true"; title="class or interface in java.lang">@Deprecated -public HTableDescriptor getTableDescriptor() +public HTableDescriptor getTableDescriptor() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Deprecated. Description copied from interface: Table @@ -757,7 +757,7 @@ public close -public void close() +public void close() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Description copied from interface: Table Releases any resources held or pending changes in internal buffers. @@ -779,7 +779,7 @@ public get -public Result get(Get get) +public Result get(Get get) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=tru
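Since the page above documents RemoteHTable, the REST-gateway Table implementation, here is a hedged usage sketch. The RemoteHTable(Client, String) constructor comes from the page itself; the Cluster/Client setup and the gateway host and port are illustrative assumptions:

// Hedged sketch: a Get against a table exposed through the HBase REST gateway.
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

public class RemoteHTableExample {
  public static void main(String[] args) throws Exception {
    Cluster cluster = new Cluster();
    cluster.add("rest-gateway.example.com", 8080);   // hypothetical gateway endpoint
    Client client = new Client(cluster);
    RemoteHTable table = new RemoteHTable(client, "my_table");
    try {
      Result result = table.get(new Get(Bytes.toBytes("row1")));
      System.out.println(Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"))));
    } finally {
      table.close();  // releases resources, as Table.close() documents
    }
  }
}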
[46/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html b/apidocs/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html index 1a92ec4..494bbc8 100644 --- a/apidocs/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html +++ b/apidocs/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html @@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public static class Base64.Base64InputStream +public static class Base64.Base64InputStream extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStream.html?is-external=true"; title="class or interface in java.io">FilterInputStream A Base64.Base64InputStream will read data from another InputStream, given in the constructor, and @@ -248,7 +248,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStr Base64InputStream -public Base64InputStream(http://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true"; title="class or interface in java.io">InputStream in) +public Base64InputStream(http://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true"; title="class or interface in java.io">InputStream in) Constructs a Base64.Base64InputStream in DECODE mode. Parameters: @@ -264,7 +264,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStr Base64InputStream -public Base64InputStream(http://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true"; title="class or interface in java.io">InputStream in, +public Base64InputStream(http://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true"; title="class or interface in java.io">InputStream in, int options) Constructs a Base64.Base64InputStream in either ENCODE or DECODE mode. @@ -306,7 +306,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStr read -public int read() +public int read() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Reads enough of the input stream to convert to/from Base64 and returns the next byte. 
@@ -328,7 +328,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStr read -public int read(byte[] dest, +public int read(byte[] dest, int off, int len) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html b/apidocs/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html index 94ffb90..4afefbe 100644 --- a/apidocs/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html +++ b/apidocs/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html @@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public static class Base64.Base64OutputStream +public static class Base64.Base64OutputStream extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputStream.html?is-external=true"; title="class or interface in java.io">FilterOutputStream A Base64.Base64OutputStream will write data to another OutputStream, given in the constructor, and @@ -272,7 +272,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputSt Base64OutputStream -public Base64OutputStream(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true"; title="class or interface in java.io">OutputStream out) +public Base64OutputStream(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true"; title="class or interface in java.io">OutputStream out) Constructs a Base64.Base64OutputStream in ENCODE mode. Parameters: @@ -289,7 +289,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputSt Base64OutputStream @InterfaceAudience.Public -public Base64OutputStream(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true"; title="class or interface in java.io">OutputStream out, +public Base64OutputStream(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true"; title="class or interface in java.io">OutputStream out, int options) Constructs a Base64.Base64OutputStream in either ENCODE or DECODE mode. @@ -331,7 +331,7 @@ public write -public void write(int theByte
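The Base64 stream classes diffed above wrap another stream and encode or decode on the fly: the single-argument Base64InputStream constructor is DECODE mode and the single-argument Base64OutputStream constructor is ENCODE mode, per the generated javadoc. A minimal sketch under those assumptions:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.util.Base64;

public class Base64StreamSketch {
  public static void main(String[] args) throws Exception {
    byte[] raw = "hello hbase".getBytes("UTF-8");

    // ENCODE mode: bytes written to the wrapper come out Base64-encoded underneath.
    ByteArrayOutputStream encoded = new ByteArrayOutputStream();
    try (Base64.Base64OutputStream out = new Base64.Base64OutputStream(encoded)) {
      out.write(raw);
    }

    // DECODE mode: reading through the wrapper converts Base64 text back to raw bytes.
    ByteArrayOutputStream decoded = new ByteArrayOutputStream();
    try (Base64.Base64InputStream in =
        new Base64.Base64InputStream(new ByteArrayInputStream(encoded.toByteArray()))) {
      int b;
      while ((b = in.read()) != -1) {
        decoded.write(b);
      }
    }
    System.out.println(decoded.toString("UTF-8"));  // prints "hello hbase"
  }
}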
[21/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html index 51d92c2..86fc15e 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html @@ -44,2578 +44,2580 @@ 036import java.util.Iterator; 037import java.util.List; 038 -039import com.google.protobuf.ByteString; -040import org.apache.commons.logging.Log; -041import org.apache.commons.logging.LogFactory; -042import org.apache.hadoop.hbase.Cell; -043import org.apache.hadoop.hbase.CellComparator; -044import org.apache.hadoop.hbase.KeyValue; -045import org.apache.hadoop.io.RawComparator; -046import org.apache.hadoop.io.WritableComparator; -047import org.apache.hadoop.io.WritableUtils; -048import org.apache.yetus.audience.InterfaceAudience; -049import sun.misc.Unsafe; -050 -051import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -052import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +039import org.apache.hadoop.hbase.Cell; +040import org.apache.hadoop.hbase.CellComparator; +041import org.apache.hadoop.hbase.KeyValue; +042import org.apache.hadoop.io.RawComparator; +043import org.apache.hadoop.io.WritableComparator; +044import org.apache.hadoop.io.WritableUtils; +045import org.apache.yetus.audience.InterfaceAudience; +046import org.slf4j.Logger; +047import org.slf4j.LoggerFactory; +048 +049import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +050import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +051 +052import com.google.protobuf.ByteString; 053 -054/** -055 * Utility class that handles byte arrays, conversions to/from other types, -056 * comparisons, hash code generation, manufacturing keys for HashMaps or -057 * HashSets, and can be used as key in maps or trees. -058 */ -059@SuppressWarnings("restriction") -060@InterfaceAudience.Public -061@edu.umd.cs.findbugs.annotations.SuppressWarnings( -062 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -063justification="It has been like this forever") -064public class Bytes implements Comparable{ -065 -066 // Using the charset canonical name for String/byte[] conversions is much -067 // more efficient due to use of cached encoders/decoders. -068 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -069 -070 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -071 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -072 -073 private static final Log LOG = LogFactory.getLog(Bytes.class); +054import sun.misc.Unsafe; +055 +056/** +057 * Utility class that handles byte arrays, conversions to/from other types, +058 * comparisons, hash code generation, manufacturing keys for HashMaps or +059 * HashSets, and can be used as key in maps or trees. +060 */ +061@SuppressWarnings("restriction") +062@InterfaceAudience.Public +063@edu.umd.cs.findbugs.annotations.SuppressWarnings( +064 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", +065justification="It has been like this forever") +066public class Bytes implements Comparable { +067 +068 // Using the charset canonical name for String/byte[] conversions is much +069 // more efficient due to use of cached encoders/decoders. 
+070 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); +071 +072 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed +073 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; 074 -075 /** -076 * Size of boolean in bytes -077 */ -078 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -079 -080 /** -081 * Size of byte in bytes -082 */ -083 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -084 -085 /** -086 * Size of char in bytes -087 */ -088 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -089 -090 /** -091 * Size of double in bytes -092 */ -093 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -094 -095 /** -096 * Size of float in bytes -097 */ -098 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -099 -100 /** -101 * Size of int in bytes -102 */ -103 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -104 -105 /** -106 * Size of long in bytes -107 */ -108 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -109 -110 /** -111 * Size of short in bytes -112 */ -113 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -114 -115 /** -116 * Mask to apply to a long to reveal the lower int only. Use like this: -117 * int i = (int)(0xL ^ some_long_value); -118 */ -119 public static fi
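The Bytes class whose source is diffed above is the standard conversion utility between Java types and the byte[] keys/values HBase stores. A short sketch of the round-trip helpers and the SIZEOF constants it defines (row key values are placeholders):

import org.apache.hadoop.hbase.util.Bytes;

public class BytesSketch {
  public static void main(String[] args) {
    byte[] key = Bytes.toBytes("row-0001");   // String -> byte[]
    byte[] amount = Bytes.toBytes(42L);       // long -> byte[], SIZEOF_LONG bytes wide

    System.out.println(Bytes.toString(key));     // back to "row-0001"
    System.out.println(Bytes.toLong(amount));    // back to 42
    System.out.println(Bytes.SIZEOF_LONG);       // 8
    // Lexicographic byte comparison, the same ordering HBase uses for row keys.
    System.out.println(Bytes.compareTo(key, Bytes.toBytes("row-0002")) < 0);  // true
  }
}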
[18/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/checkstyle.rss -- diff --git a/checkstyle.rss b/checkstyle.rss index b06fe09..e8c7328 100644 --- a/checkstyle.rss +++ b/checkstyle.rss @@ -25,8 +25,8 @@ under the License. en-us ©2007 - 2017 The Apache Software Foundation - File: 3457, - Errors: 19536, + File: 3458, + Errors: 19378, Warnings: 0, Infos: 0 @@ -167,7 +167,7 @@ under the License. 0 - 2 + 0 @@ -1525,7 +1525,7 @@ under the License. 0 - 1 + 0 @@ -2897,7 +2897,7 @@ under the License. 0 - 3 + 2 @@ -4199,7 +4199,7 @@ under the License. 0 - 4 + 3 @@ -5151,7 +5151,7 @@ under the License. 0 - 4 + 0 @@ -5277,7 +5277,7 @@ under the License. 0 - 1 + 0 @@ -5366,6 +5366,20 @@ under the License. + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.log.HBaseMarkers.java";>org/apache/hadoop/hbase/log/HBaseMarkers.java + + + 0 + + + 0 + + + 1 + + + + http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.RawCellBuilder.java";>org/apache/hadoop/hbase/RawCellBuilder.java @@ -5627,7 +5641,7 @@ under the License. 0 - 7 + 4 @@ -5767,7 +5781,7 @@ under the License. 0 - 5 + 4 @@ -9379,7 +9393,7 @@ under the License. 0 - 1 + 0 @@ -9519,7 +9533,7 @@ under the License. 0 - 2 + 3 @@ -9589,7 +9603,7 @@ under the License. 0 - 9 + 8 @@ -9715,7 +9729,7 @@ under the License. 0 - 4 + 2 @@ -9771,7 +9785,7 @@ under the License. 0 - 2 + 0 @@ -10527,7 +10541,7 @@ under the License. 0 - 1 + 0 @@ -10807,7 +10821,7 @@ under the License. 0 - 5 + 1 @@ -11521,7 +11535,7 @@ under the License. 0 - 4 + 0 @@ -12151,7 +12165,7 @@ under the License. 0 - 3 + 1 @@ -12361,7 +12375,7 @@ under the License. 0 - 22 + 21 @@ -12837,7 +12851,7 @@ under the License. 0 - 22 +
[33/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html index ca4c22a..35fccf3 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html @@ -27,125 +27,124 @@ 019package org.apache.hadoop.hbase.mapreduce; 020 021import java.io.IOException; -022 -023import org.apache.commons.logging.Log; -024import org.apache.commons.logging.LogFactory; -025import org.apache.yetus.audience.InterfaceAudience; -026import org.apache.hadoop.conf.Configurable; -027import org.apache.hadoop.conf.Configuration; -028import org.apache.hadoop.hbase.HBaseConfiguration; -029import org.apache.hadoop.hbase.TableName; -030import org.apache.hadoop.hbase.client.Connection; -031import org.apache.hadoop.hbase.client.ConnectionFactory; -032import org.apache.hadoop.hbase.client.RegionLocator; -033import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -034import org.apache.hadoop.hbase.mapred.TableOutputFormat; -035import org.apache.hadoop.hbase.util.Bytes; -036import org.apache.hadoop.mapreduce.Partitioner; -037 -038/** -039 * This is used to partition the output keys into groups of keys. -040 * Keys are grouped according to the regions that currently exist -041 * so that each reducer fills a single region so load is distributed. -042 * -043 *This class is not suitable as partitioner creating hfiles -044 * for incremental bulk loads as region spread will likely change between time of -045 * hfile creation and load time. See {@link org.apache.hadoop.hbase.tool.LoadIncrementalHFiles} -046 * and Bulk Load.
-047 * -048 * @paramThe type of the key. -049 * @param The type of the value. -050 */ -051@InterfaceAudience.Public -052public class HRegionPartitioner -053extends Partitioner -054implements Configurable { -055 -056 private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class); -057 private Configuration conf = null; -058 // Connection and locator are not cleaned up; they just die when partitioner is done. -059 private Connection connection; -060 private RegionLocator locator; -061 private byte[][] startKeys; -062 -063 /** -064 * Gets the partition number for a given key (hence record) given the total -065 * number of partitions i.e. number of reduce-tasks for the job. -066 * -067 * Typically a hash function on a all or a subset of the key.
-068 * -069 * @param key The key to be partitioned. -070 * @param value The entry value. -071 * @param numPartitions The total number of partitions. -072 * @return The partition number for thekey
. -073 * @see org.apache.hadoop.mapreduce.Partitioner#getPartition( -074 * java.lang.Object, java.lang.Object, int) -075 */ -076 @Override -077 public int getPartition(ImmutableBytesWritable key, -078 VALUE value, int numPartitions) { -079byte[] region = null; -080// Only one region return 0 -081if (this.startKeys.length == 1){ -082 return 0; -083} -084try { -085 // Not sure if this is cached after a split so we could have problems -086 // here if a region splits while mapping -087 region = this.locator.getRegionLocation(key.get()).getRegionInfo().getStartKey(); -088} catch (IOException e) { -089 LOG.error(e); -090} -091for (int i = 0; i < this.startKeys.length; i++){ -092 if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){ -093if (i >= numPartitions-1){ -094 // cover if we have less reduces then regions. -095 return (Integer.toString(i).hashCode() -096 & Integer.MAX_VALUE) % numPartitions; -097} -098return i; -099 } -100} -101// if above fails to find start key that match we need to return something -102return 0; -103 } -104 -105 /** -106 * Returns the current configuration. -107 * -108 * @return The current configuration. -109 * @see org.apache.hadoop.conf.Configurable#getConf() -110 */ -111 @Override -112 public Configuration getConf() { -113return conf; -114 } -115 -116 /** -117 * Sets the configuration. This is used to determine the start keys for the -118 * given table. -119 * -120 * @param configuration The configuration to set. -121 * @see org.apache.hadoop.conf.Configurable#setConf( -122 * org.apache.hadoop.conf.Configuration) -123 */ -124 @Override -125 public void setConf(Configuration configuration) { -126this.conf = HBaseConfiguration.create(co
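The HRegionPartitioner source diffed above routes each output key to the reducer owning the region that currently contains it. A hedged sketch of wiring it into a write job via TableMapReduceUtil; the table name and the pass-through reducer are placeholders, not part of the commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HRegionPartitioner;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.mapreduce.Job;

public class RegionAlignedJobSketch {
  // Identity reducer: each incoming Mutation is written through to the table.
  public static class WriteReducer
      extends TableReducer<ImmutableBytesWritable, Mutation, ImmutableBytesWritable> {
  }

  public static Job createJob() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Job job = Job.getInstance(conf, "region-aligned-write");
    // Passing HRegionPartitioner makes each reducer fill a single region, as the
    // partitioner's class comment above describes (not suitable for bulk-load HFiles).
    TableMapReduceUtil.initTableReducerJob("my_table", WriteReducer.class, job,
        HRegionPartitioner.class);
    return job;
  }
}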
[10/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html index 580681e..bc8777a 100644 --- a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html +++ b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab"; -static interface ScheduledChore.ChoreServicer +static interface ScheduledChore.ChoreServicer @@ -177,7 +177,7 @@ var activeTableTab = "activeTableTab"; cancelChore -void cancelChore(ScheduledChore chore) +void cancelChore(ScheduledChore chore) Cancel any ongoing schedules that this chore has with the implementer of this interface. @@ -187,7 +187,7 @@ var activeTableTab = "activeTableTab"; cancelChore -void cancelChore(ScheduledChore chore, +void cancelChore(ScheduledChore chore, boolean mayInterruptIfRunning) @@ -197,7 +197,7 @@ var activeTableTab = "activeTableTab"; isChoreScheduled -boolean isChoreScheduled(ScheduledChore chore) +boolean isChoreScheduled(ScheduledChore chore) Returns: true when the chore is scheduled with the implementer of this interface @@ -210,7 +210,7 @@ var activeTableTab = "activeTableTab"; triggerNow -boolean triggerNow(ScheduledChore chore) +boolean triggerNow(ScheduledChore chore) This method tries to execute the chore immediately. If the chore is executing at the time of this call, the chore will begin another execution as soon as the current execution finishes @@ -227,7 +227,7 @@ var activeTableTab = "activeTableTab"; onChoreMissedStartTime -void onChoreMissedStartTime(ScheduledChore chore) +void onChoreMissedStartTime(ScheduledChore chore) A callback that tells the implementer of this interface that one of the scheduled chores is missing its start time. The implication of a chore missing its start time is that the service's current means of scheduling may not be sufficient to handle the number of ongoing http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html index 2bb1d8c..279930f 100644 --- a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html +++ b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html @@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public abstract class ScheduledChore +public abstract class ScheduledChore extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true"; title="class or interface in java.lang">Runnable ScheduledChore is a task performed on a period in hbase. ScheduledChores become active once @@ -194,7 +194,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. initialDelay -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -428,7 +428,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -437,7 +437,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. 
name -private final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String name +private final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String name @@ -446,7 +446,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. DEFAULT_TIME_UNIT -private static final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true"; title="class or interface in java.util.concurrent">TimeUnit DEFAULT_TIME_UNIT +private static final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true"; title="class or interface in java.util.concurrent">TimeUnit DEFAULT_TIME_UNIT Default values for scheduling parameters should they be excluded during construction @@ -456,7 +456,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. DEFAULT_INITIAL_DELAY -private static final long DEFAULT_INITIAL_DELAY +private static final long DEFAULT_INITIAL_DELAY See Also: Constant Field Values @@ -469,7 +469,7 @@ implements http://docs.oracle.com/javase/8/docs/ap
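ScheduledChore, diffed above, is the periodic-task abstraction that a ChoreService runs. A minimal sketch of defining and scheduling one; the chore name, period, and Stoppable implementation are illustrative only:

import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;

public class ChoreSketch {
  public static void main(String[] args) throws Exception {
    Stoppable stopper = new Stoppable() {
      private volatile boolean stopped = false;
      @Override public void stop(String why) { stopped = true; }
      @Override public boolean isStopped() { return stopped; }
    };

    // A chore that runs every 1000 ms until its stopper is stopped.
    ScheduledChore heartbeat = new ScheduledChore("heartbeat", stopper, 1000) {
      @Override
      protected void chore() {
        System.out.println("chore ran at " + System.currentTimeMillis());
      }
    };

    ChoreService service = new ChoreService("example-");
    service.scheduleChore(heartbeat);
    Thread.sleep(5000);
    stopper.stop("done");
    service.shutdown();  // cancels outstanding chores and stops the pool
  }
}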
[28/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html index add30e1..594966b 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html @@ -40,1084 +40,1083 @@ 032 033import org.apache.commons.cli.CommandLine; 034import org.apache.commons.cli.Option; -035import org.apache.commons.logging.Log; -036import org.apache.commons.logging.LogFactory; -037import org.apache.hadoop.conf.Configuration; -038import org.apache.hadoop.fs.FSDataInputStream; -039import org.apache.hadoop.fs.FSDataOutputStream; -040import org.apache.hadoop.fs.FileChecksum; -041import org.apache.hadoop.fs.FileStatus; -042import org.apache.hadoop.fs.FileSystem; -043import org.apache.hadoop.fs.FileUtil; -044import org.apache.hadoop.fs.Path; -045import org.apache.hadoop.fs.permission.FsPermission; -046import org.apache.hadoop.hbase.HBaseConfiguration; -047import org.apache.hadoop.hbase.HConstants; -048import org.apache.hadoop.hbase.TableName; -049import org.apache.hadoop.hbase.client.RegionInfo; -050import org.apache.hadoop.hbase.io.FileLink; -051import org.apache.hadoop.hbase.io.HFileLink; -052import org.apache.hadoop.hbase.io.WALLink; -053import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream; -054import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; -055import org.apache.hadoop.hbase.mob.MobUtils; -056import org.apache.hadoop.hbase.util.AbstractHBaseTool; -057import org.apache.hadoop.hbase.util.FSUtils; -058import org.apache.hadoop.hbase.util.HFileArchiveUtil; -059import org.apache.hadoop.hbase.util.Pair; -060import org.apache.hadoop.io.BytesWritable; -061import org.apache.hadoop.io.IOUtils; -062import org.apache.hadoop.io.NullWritable; -063import org.apache.hadoop.io.Writable; -064import org.apache.hadoop.mapreduce.InputFormat; -065import org.apache.hadoop.mapreduce.InputSplit; -066import org.apache.hadoop.mapreduce.Job; -067import org.apache.hadoop.mapreduce.JobContext; -068import org.apache.hadoop.mapreduce.Mapper; -069import org.apache.hadoop.mapreduce.RecordReader; -070import org.apache.hadoop.mapreduce.TaskAttemptContext; -071import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -072import org.apache.hadoop.mapreduce.security.TokenCache; -073import org.apache.hadoop.util.StringUtils; -074import org.apache.hadoop.util.Tool; -075import org.apache.yetus.audience.InterfaceAudience; -076 -077import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; -078import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo; -079import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; -080 -081/** -082 * Export the specified snapshot to a given FileSystem. -083 * -084 * The .snapshot/name folder is copied to the destination cluster -085 * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location. -086 * When everything is done, the second cluster can restore the snapshot. 
-087 */ -088@InterfaceAudience.Public -089public class ExportSnapshot extends AbstractHBaseTool implements Tool { -090 public static final String NAME = "exportsnapshot"; -091 /** Configuration prefix for overrides for the source filesystem */ -092 public static final String CONF_SOURCE_PREFIX = NAME + ".from."; -093 /** Configuration prefix for overrides for the destination filesystem */ -094 public static final String CONF_DEST_PREFIX = NAME + ".to."; -095 -096 private static final Log LOG = LogFactory.getLog(ExportSnapshot.class); -097 -098 private static final String MR_NUM_MAPS = "mapreduce.job.maps"; -099 private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits"; -100 private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name"; -101 private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir"; -102 private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user"; -103 private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group"; -104 private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode"; -105 private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify"; -106 private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root"; -107 private static final String CONF_INPUT_ROOT = "snapshot.export.input.root"; -108 private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size"; -109 private static final String CONF_MAP_GROUP = "snapshot.export.defaul
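ExportSnapshot, diffed above, copies a snapshot's manifest and referenced hfiles/WALs to another FileSystem so the target cluster can restore it. Since the class implements Tool, it can be driven from code as well as the command line; a sketch, where the snapshot name and destination URI are placeholders:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
import org.apache.hadoop.util.ToolRunner;

public class ExportSnapshotSketch {
  public static void main(String[] args) throws Exception {
    // Equivalent to: hbase org.apache.hadoop.hbase.snapshot.ExportSnapshot \
    //   -snapshot my_snapshot -copy-to hdfs://backup-cluster:8020/hbase
    int exit = ToolRunner.run(HBaseConfiguration.create(), new ExportSnapshot(),
        new String[] { "-snapshot", "my_snapshot",
                       "-copy-to", "hdfs://backup-cluster:8020/hbase" });
    System.exit(exit);
  }
}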
[19/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/checkstyle-aggregate.html -- diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html index 2e1d4e8..d5a2465 100644 --- a/checkstyle-aggregate.html +++ b/checkstyle-aggregate.html @@ -7,7 +7,7 @@ - + Apache HBase – Checkstyle Results @@ -286,10 +286,10 @@ Warnings Errors -3457 +3458 0 0 -19536 +19378 Files @@ -379,16 +379,6 @@ 0 4 -org/apache/hadoop/hbase/CompatibilityFactory.java -0 -0 -1 - -org/apache/hadoop/hbase/CompatibilitySingletonFactory.java -0 -0 -1 - org/apache/hadoop/hbase/CompoundConfiguration.java 0 0 @@ -432,7 +422,7 @@ org/apache/hadoop/hbase/GenericTestUtils.java 0 0 -3 +2 org/apache/hadoop/hbase/HBaseCluster.java 0 @@ -442,7 +432,7 @@ org/apache/hadoop/hbase/HBaseClusterManager.java 0 0 -3 +2 org/apache/hadoop/hbase/HBaseCommonTestingUtility.java 0 @@ -462,7 +452,7 @@ org/apache/hadoop/hbase/HBaseTestingUtility.java 0 0 -266 +267 org/apache/hadoop/hbase/HColumnDescriptor.java 0 @@ -852,7 +842,7 @@ org/apache/hadoop/hbase/TestKeyValue.java 0 0 -2 +3 org/apache/hadoop/hbase/TestLocalHBaseCluster.java 0 @@ -967,7 +957,7 @@ org/apache/hadoop/hbase/ZKNamespaceManager.java 0 0 -5 +1 org/apache/hadoop/hbase/ZNodeClearer.java 0 @@ -984,942 +974,932 @@ 0 1 -org/apache/hadoop/hbase/backup/BackupDriver.java -0 -0 -1 - org/apache/hadoop/hbase/backup/BackupHFileCleaner.java 0 0 3 - + org/apache/hadoop/hbase/backup/BackupInfo.java 0 0 1 - + org/apache/hadoop/hbase/backup/BackupMergeJob.java 0 0 1 - + org/apache/hadoop/hbase/backup/BackupRestoreConstants.java 0 0 5 - + org/apache/hadoop/hbase/backup/BackupRestoreFactory.java 0 0 1 - + org/apache/hadoop/hbase/backup/BackupTableInfo.java 0 0 1 - + org/apache/hadoop/hbase/backup/FailedArchiveException.java 0 0 1 - + org/apache/hadoop/hbase/backup/HBackupFileSystem.java 0 0 1 - + org/apache/hadoop/hbase/backup/HFileArchiver.java 0 0 19 - + org/apache/hadoop/hbase/backup/LogUtils.java 0 0 -2 - +1 + org/apache/hadoop/hbase/backup/RestoreDriver.java 0 0 2 - + org/apache/hadoop/hbase/backup/RestoreJob.java 0 0 1 - + org/apache/hadoop/hbase/backup/RestoreRequest.java 0 0 1 - + org/apache/hadoop/hbase/backup/TestBackupBase.java 0 0 13 - + org/apache/hadoop/hbase/backup/TestBackupBoundaryTests.java 0 0 5 - + org/apache/hadoop/hbase/backup/TestBackupDelete.java 0 0 2 - + org/apache/hadoop/hbase/backup/TestBackupDeleteRestore.java 0 0 1 - + org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java 0 0 16 - + org/apache/hadoop/hbase/backup/TestBackupDescribe.java 0 0 1 - + org/apache/hadoop/hbase/backup/TestBackupHFileCleaner.java 0 0 4 - + org/apache/hadoop/hbase/backup/TestBackupShowHistory.java 0 0 2 - + org/apache/hadoop/hbase/backup/TestBackupStatusProgress.java 0 0 1 - + org/apache/hadoop/hbase/backup/TestBackupSystemTable.java 0 0 2 - + org/apache/hadoop/hbase/backup/TestFullBackupSet.java 0 0 1 - + org/apache/hadoop/hbase/backup/TestFullRestore.java 0 0 11 - + org/apache/hadoop/hbase/backup/TestHFileArchiving.java 0 0 13 - + org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java 0 0 4 - + org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java 0 0 5 - + org/apache/hadoop/hbase/backup/TestRemoteBackup.java 0 0 2 - + org/apache/hadoop/hbase/backup/TestRemoteRestore.java 0 0 2 - + org/apache/hadoop/hbase/backup/TestRepairAfterFailedDelete.java 0 0 1 - + org/apache/hadoop/hbase/backup/TestRestoreBoundaryTests.java 0 0 2 - + org/apache/hadoop/hbase/backup/TestSystemTableSnapshot.java 0 0 1 - + 
org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java 0 0 4 - + org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java 0 0 5 - + org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java 0 0 7 - + org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java 0 0 9 - + org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java 0 0 2 - + org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java 0 0 15 - + org/apache/hadoop/hbase/backup/impl/BackupCommands.java 0 0 57 - + org/apache/hadoop/hbase/backup/impl/BackupManager.java 0 0 -4 - +3 + org/apache/hadoop/hbase/backup/impl/BackupManifest.java 0 0 2 - + org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java 0 0 -22 - +21 + org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java 0 0 3 - + org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.java 0 0 -5 - +4 + org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java 0 0 10 - + org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java 0 0 2 - + org/apache/hadoop/hbase/backup/impl/TableBackupClient.java 0 0 13 - + org/apache/
[31/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html index ebbc833..36c4c5c 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html @@ -28,299 +28,298 @@ 020import java.io.IOException; 021import java.lang.reflect.Method; 022import java.util.Map; -023 -024import org.apache.commons.logging.Log; -025import org.apache.commons.logging.LogFactory; -026import org.apache.yetus.audience.InterfaceAudience; -027import org.apache.hadoop.conf.Configuration; -028import org.apache.hadoop.hbase.client.Result; -029import org.apache.hadoop.hbase.client.ResultScanner; -030import org.apache.hadoop.hbase.client.Scan; -031import org.apache.hadoop.hbase.client.ScannerCallable; -032import org.apache.hadoop.hbase.client.Table; -033import org.apache.hadoop.hbase.client.metrics.ScanMetrics; -034import org.apache.hadoop.hbase.DoNotRetryIOException; -035import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -036import org.apache.hadoop.hbase.util.Bytes; -037import org.apache.hadoop.mapreduce.Counter; -038import org.apache.hadoop.mapreduce.InputSplit; -039import org.apache.hadoop.mapreduce.TaskAttemptContext; -040import org.apache.hadoop.util.StringUtils; -041 -042import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -043 -044/** -045 * Iterate over an HBase table data, return (ImmutableBytesWritable, Result) -046 * pairs. -047 */ -048@InterfaceAudience.Public -049public class TableRecordReaderImpl { -050 public static final String LOG_PER_ROW_COUNT -051 = "hbase.mapreduce.log.scanner.rowcount"; -052 -053 private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class); -054 -055 // HBASE_COUNTER_GROUP_NAME is the name of mapreduce counter group for HBase -056 @VisibleForTesting -057 static final String HBASE_COUNTER_GROUP_NAME = "HBase Counters"; -058 private ResultScanner scanner = null; -059 private Scan scan = null; -060 private Scan currentScan = null; -061 private Table htable = null; -062 private byte[] lastSuccessfulRow = null; -063 private ImmutableBytesWritable key = null; -064 private Result value = null; -065 private TaskAttemptContext context = null; -066 private Method getCounter = null; -067 private long numRestarts = 0; -068 private long numStale = 0; -069 private long timestamp; -070 private int rowcount; -071 private boolean logScannerActivity = false; -072 private int logPerRowCount = 100; -073 -074 /** -075 * Restart from survivable exceptions by creating a new scanner. -076 * -077 * @param firstRow The first row to start at. -078 * @throws IOException When restarting fails. 
-079 */ -080 public void restart(byte[] firstRow) throws IOException { -081currentScan = new Scan(scan); -082currentScan.withStartRow(firstRow); -083 currentScan.setScanMetricsEnabled(true); -084if (this.scanner != null) { -085 if (logScannerActivity) { -086LOG.info("Closing the previously opened scanner object."); -087 } -088 this.scanner.close(); -089} -090this.scanner = this.htable.getScanner(currentScan); -091if (logScannerActivity) { -092 LOG.info("Current scan=" + currentScan.toString()); -093 timestamp = System.currentTimeMillis(); -094 rowcount = 0; -095} -096 } -097 -098 /** -099 * In new mapreduce APIs, TaskAttemptContext has two getCounter methods -100 * Check if getCounter(String, String) method is available. -101 * @return The getCounter method or null if not available. -102 * @throws IOException -103 */ -104 protected static Method retrieveGetCounterWithStringsParams(TaskAttemptContext context) -105 throws IOException { -106Method m = null; -107try { -108 m = context.getClass().getMethod("getCounter", -109new Class [] {String.class, String.class}); -110} catch (SecurityException e) { -111 throw new IOException("Failed test for getCounter", e); -112} catch (NoSuchMethodException e) { -113 // Ignore -114} -115return m; -116 } -117 -118 /** -119 * Sets the HBase table. -120 * -121 * @param htable The {@link org.apache.hadoop.hbase.HTableDescriptor} to scan. -122 */ -123 public void setHTable(Table htable) { -124Configuration conf = htable.getConfiguration(); -125logScannerActivity = conf.getBoolean( -126 ScannerCallable.LOG_SCANNER_ACTIVITY, false); -127logPerRowCount = conf.getInt(LOG_PER_ROW_COUNT, 100); -128this.htable = htable; -129 } -130 -131 /** -132 * Sets the scan defining the actu
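TableRecordReaderImpl, diffed above, is the scanner-driving guts behind TableInputFormat; applications normally reach it through TableMapReduceUtil. A hedged sketch of a scan-based mapper job that would exercise this reader (table name and caching values are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class ScanJobSketch {
  static class RowCountMapper extends TableMapper<Text, IntWritable> {
    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
        throws java.io.IOException, InterruptedException {
      context.write(new Text("rows"), new IntWritable(1));
    }
  }

  public static Job createJob() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Surface scanner restarts (see restart() in the source above) in the task logs.
    conf.setBoolean("hbase.client.log.scanner.activity", true);
    Job job = Job.getInstance(conf, "scan-my_table");
    Scan scan = new Scan();
    scan.setCaching(500);        // larger caching cuts round trips for full scans
    scan.setCacheBlocks(false);  // avoid polluting the block cache from MR scans
    TableMapReduceUtil.initTableMapperJob("my_table", scan,
        RowCountMapper.class, Text.class, IntWritable.class, job);
    return job;
  }
}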
[26/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html index 70481ce..b9f6622 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html @@ -47,1648 +47,1649 @@ 039import java.util.zip.GZIPInputStream; 040import java.util.zip.GZIPOutputStream; 041 -042import org.apache.commons.logging.Log; -043import org.apache.commons.logging.LogFactory; -044import org.apache.yetus.audience.InterfaceAudience; -045 -046/** -047 * Encodes and decodes to and from Base64 notation. -048 * -049 *-050 * Homepage: http://iharder.net/base64;. -051 *
-052 * -053 *-054 * Change Log: -055 *
-056 *-057 *
-107 * -108 *- v2.2.1 - Fixed bug using URL_SAFE and ORDERED encodings. Fixed bug -058 * when using very small files (~< 40 bytes).
-059 *- v2.2 - Added some helper methods for encoding/decoding directly from -060 * one file to the next. Also added a main() method to support command -061 * line encoding/decoding from one file to the next. Also added these -062 * Base64 dialects: -063 *
-076 * -077 *-064 *
-072 *- The default is RFC3548 format.
-065 *- Using Base64.URLSAFE generates URL and file name friendly format as -066 * described in Section 4 of RFC3548. -067 * http://www.faqs.org/rfcs/rfc3548.html
; -068 *- Using Base64.ORDERED generates URL and file name friendly format -069 * that preserves lexical ordering as described in -070 * http://www.faqs.org/qa/rfcc-1940.html
; -071 *-073 * Special thanks to Jim Kellerman at ; -074 * http://www.powerset.com/; for contributing the new Base64 dialects. -075 *
- v2.1 - Cleaned up javadoc comments and unused variables and methods. -078 * Added some convenience methods for reading and writing to and from files. -079 *
-080 *- v2.0.2 - Now specifies UTF-8 encoding in places where the code fails on -081 * systems with other encodings (like EBCDIC).
-082 *- v2.0.1 - Fixed an error when decoding a single byte, that is, when the -083 * encoded data was a single byte.
-084 *- v2.0 - I got rid of methods that used booleans to set options. Now -085 * everything is more consolidated and cleaner. The code now detects when -086 * data that's being decoded is gzip-compressed and will decompress it -087 * automatically. Generally things are cleaner. You'll probably have to -088 * change some method calls that you were making to support the new options -089 * format (ints that you "OR" together).
-090 *- v1.5.1 - Fixed bug when decompressing and decoding to a byte[] using -091 * decode( String s, boolean gzipCompressed ). Added the ability to -092 * "suspend" encoding in the Output Stream so you can turn on and off the -093 * encoding if you need to embed base64 data in an otherwise "normal" stream -094 * (like an XML file).
-095 *- v1.5 - Output stream pases on flush() command but doesn't do anything -096 * itself. This helps when using GZIP streams. Added the ability to -097 * GZip-compress objects before encoding them.
-098 *- v1.4 - Added helper methods to read/write files.
-099 *- v1.3.6 - Fixed OutputStream.flush() so that 'position' is reset.
-100 *- v1.3.5 - Added flag to turn on and off line breaks. Fixed bug in input -101 * stream where last buffer being read, if not completely full, was not -102 * returned.
-103 *- v1.3.4 - Fixed when "improperly padded stream" error was thrown at the -104 * wrong time.
-105 *- v1.3.3 - Fixed I/O streams which were totally messed up.
-106 *-109 * I am placing this code in the Public Domain. Do with it as you will. This -110 * software comes with no guarantees or warranties but with plenty of -111 * well-wishing instead! -112 *
-113 * Please visit http://iharder.net/base64; -114 * periodically to check for updates or to contribute improvements. -115 *
-116 * author: Robert Harder, r...@iharder.net -117 *
-118 * version: 2.2.1 -119 */ -120@InterfaceAudience.Public -121public class Base64 { -122 -123 /* P U B L I C F I E L D S */ -124 -125 /** No options specified. Value is zero. */ -126 public final static int NO_OPTIONS = 0; -127 -128 /** Specify
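The Base64 class whose header is diffed above also exposes one-shot helpers and the URL_SAFE/ORDERED dialects its change log mentions. A small sketch of those static methods, assuming the method names documented in this generated page; the input string is a placeholder:

import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

public class Base64DialectSketch {
  public static void main(String[] args) {
    byte[] raw = Bytes.toBytes("key/with+unsafe=chars");

    String standard = Base64.encodeBytes(raw);                   // default RFC3548 alphabet
    String urlSafe  = Base64.encodeBytes(raw, Base64.URL_SAFE);  // '-' and '_' instead of '+' and '/'

    System.out.println(standard);
    System.out.println(urlSafe);
    System.out.println(Bytes.toString(Base64.decode(standard))); // round-trips to the input
  }
}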
[11/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html index 422ec86..350b409 100644 --- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html +++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class MetaTableAccessor +public class MetaTableAccessor extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object Read/write operations on region and assignment information store in hbase:meta. @@ -197,7 +197,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? daughterNameCq -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -211,7 +211,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? -private static org.apache.commons.logging.Log +private static org.slf4j.Logger METALOG @@ -1053,7 +1053,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -1062,7 +1062,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? METALOG -private static final org.apache.commons.logging.Log METALOG +private static final org.slf4j.Logger METALOG @@ -1071,7 +1071,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? daughterNameCq -private static final byte[] daughterNameCq +private static final byte[] daughterNameCq @@ -1080,7 +1080,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? parentNameCq -private static final byte[] parentNameCq +private static final byte[] parentNameCq @@ -1089,7 +1089,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? tableNameCq -private static final byte[] tableNameCq +private static final byte[] tableNameCq @@ -1098,7 +1098,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? META_REGION_PREFIX -static final byte[] META_REGION_PREFIX +static final byte[] META_REGION_PREFIX @@ -1107,7 +1107,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? META_REPLICA_ID_DELIMITER -protected static final char META_REPLICA_ID_DELIMITER +protected static final char META_REPLICA_ID_DELIMITER The delimiter for meta columns for replicaIds > 0 See Also: @@ -1121,7 +1121,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? SERVER_COLUMN_PATTERN -private static final http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true"; title="class or interface in java.util.regex">Pattern SERVER_COLUMN_PATTERN +private static final http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true"; title="class or interface in java.util.regex">Pattern SERVER_COLUMN_PATTERN A regex for parsing server columns from meta. See above javadoc for meta layout @@ -1139,7 +1139,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? MetaTableAccessor -public MetaTableAccessor() +public MetaTableAccessor() @@ -1157,7 +1157,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
allTableRegions http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true"; title="class or interface in java.lang">@Deprecated -public static http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true"; title="class or interface in java.util">NavigableMapallTableRegions(Connection connection, +public static http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true"; title="class or interface in java.util">NavigableMap allTableRegions(Connection connection, TableName tableName) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Deprecated. use getTableRegionsAndLocations(org.apache.hadoop.hbase.client.Connection, org.apache.hadoop.hbase.TableName), region can have multiple locations @@ -1180,7 +1180,7 @@ public static http://docs.oracle.com/javase/8/docs/api/java/util/N fullScanRegions -public static void fullScanRegions(Connection connection, +public static void fullScanRegions(Connection connection, Meta
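The MetaTableAccessor javadoc above deprecates allTableRegions() in favor of getTableRegionsAndLocations(Connection, TableName). The class is @InterfaceAudience.Private, so the following is only an illustrative sketch of the API the deprecation note points to, not a supported client pattern; the table name is a placeholder:

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.util.Pair;

public class MetaLookupSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf)) {
      // Preferred over the deprecated allTableRegions(): a region can have multiple locations.
      List<Pair<RegionInfo, ServerName>> regions =
          MetaTableAccessor.getTableRegionsAndLocations(conn, TableName.valueOf("my_table"));
      for (Pair<RegionInfo, ServerName> p : regions) {
        System.out.println(p.getFirst().getRegionNameAsString() + " -> " + p.getSecond());
      }
    }
  }
}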
[50/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/acid-semantics.html -- diff --git a/acid-semantics.html b/acid-semantics.html index 40d8308..2f66807 100644 --- a/acid-semantics.html +++ b/acid-semantics.html @@ -7,7 +7,7 @@ - + Apache HBase – Apache HBase (TM) ACID Properties @@ -606,7 +606,7 @@ under the License. --> https://www.apache.org/";>The Apache Software Foundation. All rights reserved. - Last Published: 2017-12-20 + Last Published: 2017-12-21 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apache_hbase_reference_guide.pdf -- diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf index 4087772..c8382bd 100644 --- a/apache_hbase_reference_guide.pdf +++ b/apache_hbase_reference_guide.pdf @@ -5,8 +5,8 @@ /Author (Apache HBase Team) /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2) /Producer (Apache HBase Team) -/ModDate (D:20171220144721+00'00') -/CreationDate (D:20171220144721+00'00') +/ModDate (D:20171221144809+00'00') +/CreationDate (D:20171221144809+00'00') >> endobj 2 0 obj @@ -27974,7 +27974,7 @@ endobj endobj 136 0 obj << /Limits [(__anchor-top) (adding.new.node)] -/Names [(__anchor-top) 25 0 R (__indexterm-7346122) 3431 0 R (__indexterm-7348372) 3433 0 R (__indexterm-7350434) 3434 0 R (__indexterm-7352308) 3435 0 R (acid) 908 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3530 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3531 0 R (add.metrics) 3528 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3772 0 R (adding.new.node) 3011 0 R] +/Names [(__anchor-top) 25 0 R (__indexterm-7346124) 3431 0 R (__indexterm-7348374) 3433 0 R (__indexterm-7350436) 3434 0 R (__indexterm-7352310) 3435 0 R (acid) 908 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3530 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3531 0 R (add.metrics) 3528 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3772 0 R (adding.new.node) 3011 0 R] >> endobj 137 0 obj http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/index-all.html -- diff --git a/apidocs/index-all.html b/apidocs/index-all.html index a9ffc4b..94d2d12 100644 --- a/apidocs/index-all.html +++ b/apidocs/index-all.html @@ -12397,6 +12397,8 @@ Tools to help define network clients and servers. +org.apache.hadoop.hbase.log - package org.apache.hadoop.hbase.log + org.apache.hadoop.hbase.mapred - package org.apache.hadoop.hbase.mapred Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce";>MapReduce http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/ChoreService.html -- diff --git a/apidocs/org/apache/hadoop/hbase/ChoreService.html b/apidocs/org/apache/hadoop/hbase/ChoreService.html index ab25e14..b382c26 100644 --- a/apidocs/org/apache/hadoop/hbase/ChoreService.html +++ b/apidocs/org/apache/hadoop/hbase/ChoreService.html @@ -258,7 +258,7 @@ public Parameters: coreThreadPoolPrefix - Prefix that will be applied to the Thread name of all threads spawned by this service -corePoolSize - The initial size to set the core pool of the ScheduledThreadPoolExecutor +corePoolSize - The initial size to set the core pool of the ScheduledThreadPoolExecutor to during initialization. The default size is 1, but specifying a larger size may be beneficial if you know that 1 thread will not be enough. jitter - Should chore service add some jitter for all of the scheduled chores. 
When set http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/HRegionInfo.html -- diff --git a/apidocs/org/apache/hadoop/hbase/HRegionInfo.html b/apidocs/org/apache/hadoop/hbase/HRegionInfo.html index f89cf85..97ab06d 100644 --- a/apidocs/org/apache/hadoop/hbase/HRegionInfo.html +++ b/apidocs/org/apache/hadoop/hbase/HRegionInfo.html @@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab"; http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true"; title="class or interface in java.lang">@Deprecated @InterfaceAudience.Public -public class HRegionInfo +public class HRegionInfo extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements RegionInfo, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true
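The HRegionInfo page diffed above carries the @Deprecated marker; in 2.x code the RegionInfo/RegionInfoBuilder pair is the replacement. A brief sketch of building a RegionInfo with the builder (table name and key boundaries are placeholders):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionInfoSketch {
  public static void main(String[] args) {
    // HRegionInfo is deprecated; RegionInfoBuilder produces the immutable RegionInfo.
    RegionInfo ri = RegionInfoBuilder.newBuilder(TableName.valueOf("my_table"))
        .setStartKey(Bytes.toBytes("a"))
        .setEndKey(Bytes.toBytes("m"))
        .build();
    System.out.println(ri.getRegionNameAsString());
  }
}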
[01/51] [partial] hbase-site git commit: Published site at .
Repository: hbase-site Updated Branches: refs/heads/asf-site 5eb63ae9d -> 505bbb2e1 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html index 4f79024..d3e92f5 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html +++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -class RawAsyncHBaseAdmin +class RawAsyncHBaseAdmin extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements AsyncAdmin The implementation of AsyncAdmin. @@ -257,7 +257,7 @@ implements FLUSH_TABLE_PROCEDURE_SIGNATURE -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -1401,7 +1401,7 @@ implements FLUSH_TABLE_PROCEDURE_SIGNATURE -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String FLUSH_TABLE_PROCEDURE_SIGNATURE +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String FLUSH_TABLE_PROCEDURE_SIGNATURE See Also: Constant Field Values @@ -1414,7 +1414,7 @@ implements LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -1423,7 +1423,7 @@ implements connection -private final AsyncConnectionImpl connection +private final AsyncConnectionImpl connection @@ -1432,7 +1432,7 @@ implements retryTimer -private final org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer +private final org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer @@ -1441,7 +1441,7 @@ implements metaTable -private final AsyncTablemetaTable +private final AsyncTable metaTable @@ -1450,7 +1450,7 @@ implements rpcTimeoutNs -private final long rpcTimeoutNs +private final long rpcTimeoutNs @@ -1459,7 +1459,7 @@ implements operationTimeoutNs -private final long operationTimeoutNs +private final long operationTimeoutNs @@ -1468,7 +1468,7 @@ implements pauseNs -private final long pauseNs +private final long pauseNs @@ -1477,7 +1477,7 @@ implements maxAttempts -private final int maxAttempts +private final int maxAttempts @@ -1486,7 +1486,7 @@ implements startLogErrorsCnt -private final int startLogErrorsCnt +private final int startLogErrorsCnt @@ -1495,7 +1495,7 @@ implements ng -private final NonceGenerator ng +private final NonceGenerator ng @@ -1512,7 +1512,7 @@ implements RawAsyncHBaseAdmin -RawAsyncHBaseAdmin(AsyncConnectionImpl connection, +RawAsyncHBaseAdmin(AsyncConnectionImpl connection, org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer, AsyncAdminBuilderBase builder) @@ -1531,7 +1531,7 @@ implements newMasterCaller -private AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder newMasterCaller() +private AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder newMasterCaller() @@ -1540,7 +1540,7 @@ implements newAdminCaller -private AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder newAdminCaller() +private AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder newAdminCaller() @@ -1551,7 +1551,7 @@ implements call -private 
http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true"; title="class or interface in java.util.concurrent">CompletableFuture call(HBaseRpcController controller, +private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true"; title="class or interface in java.util.concurrent">CompletableFuture call(HBaseRpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.Interface stub, PREQ preq, RawAsyncHBaseAdmin.MasterRpcCall rpcCall, @@ -1566,7 +1566,7 @@ implements adminCall -private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true"; title="class or interface in java.util.concurrent">CompletableFuture adminCall(HBaseRpcController controller, +private http://docs.o
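RawAsyncHBaseAdmin, diffed above, is documented as the implementation behind AsyncAdmin. Client code reaches it through the public async connection API; a hedged sketch (table name is a placeholder, and the routing through RawAsyncHBaseAdmin is an internal detail per the javadoc above):

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AsyncAdminSketch {
  public static void main(String[] args) throws Exception {
    CompletableFuture<AsyncConnection> connFuture =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create());
    try (AsyncConnection conn = connFuture.get()) {
      AsyncAdmin admin = conn.getAdmin();  // implementation is the class documented above
      admin.tableExists(TableName.valueOf("my_table"))
           .thenAccept(exists -> System.out.println("exists: " + exists))
           .join();  // block only for the demo; callers can stay fully asynchronous
    }
  }
}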
[29/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html index 4eb0b84..7b7da4d 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html @@ -30,9 +30,9 @@ 022import java.io.IOException; 023import java.io.InputStream; 024 -025import org.apache.commons.logging.Log; -026import org.apache.commons.logging.LogFactory; -027import org.apache.yetus.audience.InterfaceAudience; +025import org.apache.yetus.audience.InterfaceAudience; +026import org.slf4j.Logger; +027import org.slf4j.LoggerFactory; 028import org.apache.http.Header; 029import org.apache.http.HttpResponse; 030 @@ -41,7 +41,7 @@ 033 */ 034@InterfaceAudience.Public 035public class Response { -036 private static final Log LOG = LogFactory.getLog(Response.class); +036 private static final Logger LOG = LoggerFactory.getLogger(Response.class); 037 038 private int code; 039 private Header[] headers;
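The Response diff above shows the commons-logging to slf4j switch that runs through this whole publish: Log/LogFactory fields become Logger/LoggerFactory. A minimal sketch of the pattern outside HBase code (the class name is a placeholder):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingMigrationSketch {
  // Before (commons-logging), as on the removed lines above:
  //   private static final Log LOG = LogFactory.getLog(Response.class);
  // After (slf4j), as on the added lines:
  private static final Logger LOG = LoggerFactory.getLogger(LoggingMigrationSketch.class);

  public static void main(String[] args) {
    // slf4j's parameterized messages avoid string concatenation when the level is off,
    // which is why many explicit isDebugEnabled() guards can be dropped.
    LOG.debug("status code {} with {} headers", 200, 3);
  }
}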
[45/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/util/Bytes.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/Bytes.html b/apidocs/org/apache/hadoop/hbase/util/Bytes.html index 06a97fb..95054de 100644 --- a/apidocs/org/apache/hadoop/hbase/util/Bytes.html +++ b/apidocs/org/apache/hadoop/hbase/util/Bytes.html @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class Bytes +public class Bytes extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true"; title="class or interface in java.lang">ComparableUtility class that handles byte arrays, conversions to/from other types, @@ -1204,7 +1204,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_BOOLEAN -public static final int SIZEOF_BOOLEAN +public static final int SIZEOF_BOOLEAN Size of boolean in bytes See Also: @@ -1218,7 +1218,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_BYTE -public static final int SIZEOF_BYTE +public static final int SIZEOF_BYTE Size of byte in bytes See Also: @@ -1232,7 +1232,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_CHAR -public static final int SIZEOF_CHAR +public static final int SIZEOF_CHAR Size of char in bytes See Also: @@ -1246,7 +1246,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_DOUBLE -public static final int SIZEOF_DOUBLE +public static final int SIZEOF_DOUBLE Size of double in bytes See Also: @@ -1260,7 +1260,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_FLOAT -public static final int SIZEOF_FLOAT +public static final int SIZEOF_FLOAT Size of float in bytes See Also: @@ -1274,7 +1274,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_INT -public static final int SIZEOF_INT +public static final int SIZEOF_INT Size of int in bytes See Also: @@ -1288,7 +1288,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_LONG -public static final int SIZEOF_LONG +public static final int SIZEOF_LONG Size of long in bytes See Also: @@ -1302,7 +1302,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl SIZEOF_SHORT -public static final int SIZEOF_SHORT +public static final int SIZEOF_SHORT Size of short in bytes See Also: @@ -1316,7 +1316,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl MASK_FOR_LOWER_INT_IN_LONG -public static final long MASK_FOR_LOWER_INT_IN_LONG +public static final long MASK_FOR_LOWER_INT_IN_LONG Mask to apply to a long to reveal the lower int only. Use like this: int i = (int)(0xL ^ some_long_value); @@ -1331,7 +1331,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl ESTIMATED_HEAP_TAX -public static final int ESTIMATED_HEAP_TAX +public static final int ESTIMATED_HEAP_TAX Estimate of size cost to pay beyond payload in jvm for instance of byte []. Estimate based on study of jhat and jprofiler numbers. 
@@ -1346,7 +1346,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl BYTES_COMPARATOR -public static final http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true"; title="class or interface in java.util">Comparator BYTES_COMPARATOR +public static final http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true"; title="class or interface in java.util">Comparator BYTES_COMPARATOR Pass this to TreeMaps where byte [] are keys. @@ -1356,7 +1356,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl BYTES_RAWCOMPARATOR -public static final org.apache.hadoop.io.RawComparator BYTES_RAWCOMPARATOR +public static final org.apache.hadoop.io.RawComparator BYTES_RAWCOMPARATOR Use comparing byte arrays, byte-by-byte @@ -1374,7 +1374,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl Bytes -public Bytes() +public Bytes() Create a zero-size sequence. @@ -1384,7 +1384,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl Bytes -public Bytes(byte[] bytes) +public Bytes(byte[] bytes) Create a Bytes using the byte array as the initial value. Parameters: @@ -1398,7 +1398,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl Bytes -public Bytes(Bytes ibw) +public Bytes(Bytes ibw) Set the new Bytes to the contents of the passed ibw. @@ -1413,7 +1413,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/l
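The Bytes javadoc above notes that BYTES_COMPARATOR is meant for TreeMaps keyed by byte[]. A short sketch of that usage (keys are placeholders):

import java.util.TreeMap;
import org.apache.hadoop.hbase.util.Bytes;

public class BytesComparatorSketch {
  public static void main(String[] args) {
    // byte[] has identity equals(), so a sorted map needs Bytes.BYTES_COMPARATOR.
    TreeMap<byte[], String> sorted = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    sorted.put(Bytes.toBytes("row-2"), "b");
    sorted.put(Bytes.toBytes("row-1"), "a");
    sorted.put(Bytes.toBytes("row-10"), "c");
    // Lexicographic byte ordering: row-1, row-10, row-2
    sorted.forEach((k, v) -> System.out.println(Bytes.toString(k) + " = " + v));
  }
}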
[08/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html index 621b7a0..d5e6cae 100644 --- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html +++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class BackupManifest +public class BackupManifest extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object Backup manifest contains all the meta data of a backup image. The manifest info will be bundled as manifest file together with data. So that each backup image will contain all the info needed @@ -161,7 +161,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? backupImage -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -328,7 +328,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -337,7 +337,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? MANIFEST_FILE_NAME -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String MANIFEST_FILE_NAME +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String MANIFEST_FILE_NAME See Also: Constant Field Values @@ -350,7 +350,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? tableBackupDir -private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String tableBackupDir +private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String tableBackupDir @@ -359,7 +359,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? backupImage -private BackupManifest.BackupImage backupImage +private BackupManifest.BackupImage backupImage @@ -376,7 +376,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? BackupManifest -public BackupManifest(BackupInfo backup) +public BackupManifest(BackupInfo backup) Construct manifest for a ongoing backup. Parameters: @@ -390,7 +390,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? BackupManifest -public BackupManifest(BackupInfo backup, +public BackupManifest(BackupInfo backup, TableName table) Construct a table level manifest for a backup of the named table. @@ -405,7 +405,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? BackupManifest -public BackupManifest(org.apache.hadoop.conf.Configuration conf, +public BackupManifest(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.Path backupPath) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Construct manifest from a backup directory. @@ -424,7 +424,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
BackupManifest -public BackupManifest(org.apache.hadoop.fs.FileSystem fs, +public BackupManifest(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path backupPath) throws BackupException Construct manifest from a backup directory. @@ -451,7 +451,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? getType -public BackupType getType() +public BackupType getType() @@ -460,7 +460,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? getTableList -public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">ListgetTableList() +public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List getTableList() Get the table set of this image. Returns: @@ -474,7 +474,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? store -public void store(org.apache.hadoop.conf.Configuration conf) +public void store(org.apache.hadoop.conf.Configuration conf) throws BackupException TODO: fix it. Persist the manifest file. @@ -490,7 +490,7 @@ extends http://docs.oracle.com/jav
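BackupManifest is marked @InterfaceAudience.Private, so it is not a public API, but the constructors and accessors listed above sketch its round trip. A rough example of reading a stored manifest back, assuming the signatures shown on this page and a hypothetical backup directory:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupManifest;

public class BackupManifestSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Hypothetical backup image directory; a real path comes from the backup system.
    Path backupDir = new Path("hdfs:///hbase-backups/backup_0001");
    // Read back the manifest that was bundled with the backup data.
    BackupManifest manifest = new BackupManifest(conf, backupDir);
    System.out.println("type:  " + manifest.getType());
    for (TableName table : manifest.getTableList()) {
      System.out.println("table: " + table);
    }
  }
}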
[20/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/MD5Hash.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/MD5Hash.html b/apidocs/src-html/org/apache/hadoop/hbase/util/MD5Hash.html index 999b0f2..3970aa2 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/MD5Hash.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/MD5Hash.html @@ -31,9 +31,9 @@ 023import java.security.NoSuchAlgorithmException; 024 025import org.apache.commons.codec.binary.Hex; -026import org.apache.commons.logging.Log; -027import org.apache.commons.logging.LogFactory; -028import org.apache.yetus.audience.InterfaceAudience; +026import org.apache.yetus.audience.InterfaceAudience; +027import org.slf4j.Logger; +028import org.slf4j.LoggerFactory; 029 030/** 031 * Utility class for MD5 @@ -41,7 +41,7 @@ 033 */ 034@InterfaceAudience.Public 035public class MD5Hash { -036 private static final Log LOG = LogFactory.getLog(MD5Hash.class); +036 private static final Logger LOG = LoggerFactory.getLogger(MD5Hash.class); 037 038 /** 039 * Given a byte array, returns in MD5 hash as a hex string. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html b/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html index d426656..cc11564 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html @@ -51,27 +51,27 @@ 043import java.util.concurrent.TimeoutException; 044 045import org.apache.commons.cli.CommandLine; -046import org.apache.commons.logging.Log; -047import org.apache.commons.logging.LogFactory; -048import org.apache.hadoop.conf.Configuration; -049import org.apache.hadoop.hbase.ClusterStatus.Option; -050import org.apache.hadoop.hbase.HBaseConfiguration; -051import org.apache.hadoop.hbase.HConstants; -052import org.apache.hadoop.hbase.ServerName; -053import org.apache.hadoop.hbase.TableName; -054import org.apache.hadoop.hbase.client.Admin; -055import org.apache.hadoop.hbase.client.Connection; -056import org.apache.hadoop.hbase.client.ConnectionFactory; -057import org.apache.hadoop.hbase.client.Get; -058import org.apache.hadoop.hbase.client.RegionInfo; -059import org.apache.hadoop.hbase.client.Result; -060import org.apache.hadoop.hbase.client.ResultScanner; -061import org.apache.hadoop.hbase.client.Scan; -062import org.apache.hadoop.hbase.client.Table; -063import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; -064import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; -065import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -066import org.apache.yetus.audience.InterfaceAudience; +046import org.apache.hadoop.conf.Configuration; +047import org.apache.hadoop.hbase.ClusterStatus.Option; +048import org.apache.hadoop.hbase.HBaseConfiguration; +049import org.apache.hadoop.hbase.HConstants; +050import org.apache.hadoop.hbase.ServerName; +051import org.apache.hadoop.hbase.TableName; +052import org.apache.hadoop.hbase.client.Admin; +053import org.apache.hadoop.hbase.client.Connection; +054import org.apache.hadoop.hbase.client.ConnectionFactory; +055import org.apache.hadoop.hbase.client.Get; +056import org.apache.hadoop.hbase.client.RegionInfo; +057import org.apache.hadoop.hbase.client.Result; +058import org.apache.hadoop.hbase.client.ResultScanner; +059import org.apache.hadoop.hbase.client.Scan; +060import 
org.apache.hadoop.hbase.client.Table; +061import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; +062import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; +063import org.apache.hadoop.hbase.zookeeper.ZKWatcher; +064import org.apache.yetus.audience.InterfaceAudience; +065import org.slf4j.Logger; +066import org.slf4j.LoggerFactory; 067 068/** 069 * Tool for loading/unloading regions to/from given regionserver This tool can be run from Command @@ -90,7 +90,7 @@ 082 public static final int DEFAULT_MOVE_RETRIES_MAX = 5; 083 public static final int DEFAULT_MOVE_WAIT_MAX = 60; 084 public static final int DEFAULT_SERVERSTART_WAIT_MAX = 180; -085 static final Log LOG = LogFactory.getLog(RegionMover.class); +085 static final Logger LOG = LoggerFactory.getLogger(RegionMover.class); 086 private RegionMoverBuilder rmbuilder; 087 private boolean ack = true; 088 private int maxthreads = 1; http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/VersionInfo.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/VersionInfo.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Version
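The recurring change in these files is the swap from commons-logging to slf4j. A small before/after sketch of the declaration pattern being applied (class name reused from the MD5Hash diff above purely for illustration):

// Before (commons-logging), as in the removed lines above:
//   import org.apache.commons.logging.Log;
//   import org.apache.commons.logging.LogFactory;
//   private static final Log LOG = LogFactory.getLog(MD5Hash.class);

// After (slf4j), as in the added lines above:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MD5HashLoggingExample {
  private static final Logger LOG = LoggerFactory.getLogger(MD5HashLoggingExample.class);

  public static void main(String[] args) {
    // slf4j adds {} placeholders, which avoid string concatenation when the level is disabled;
    // commons-logging had no equivalent.
    LOG.info("hashed {} bytes in {} ms", 128, 3);
  }
}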
[15/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/HRegionInfo.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/HRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/HRegionInfo.html index 8a9cf9c..50fdead 100644 --- a/devapidocs/org/apache/hadoop/hbase/HRegionInfo.html +++ b/devapidocs/org/apache/hadoop/hbase/HRegionInfo.html @@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab"; http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true"; title="class or interface in java.lang">@Deprecated @InterfaceAudience.Public -public class HRegionInfo +public class HRegionInfo extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements RegionInfo, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true"; title="class or interface in java.lang">ComparableInformation about a region. A region is a range of keys in the whole keyspace of a table, an @@ -229,7 +229,7 @@ implements -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG Deprecated. @@ -917,7 +917,7 @@ implements LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG Deprecated. @@ -927,7 +927,7 @@ implements ENCODED_REGION_NAME_REGEX -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String ENCODED_REGION_NAME_REGEX +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String ENCODED_REGION_NAME_REGEX Deprecated. A non-capture group so that this can be embedded. @@ -942,7 +942,7 @@ implements MAX_REPLICA_ID -private static final int MAX_REPLICA_ID +private static final int MAX_REPLICA_ID Deprecated. See Also: @@ -956,7 +956,7 @@ implements endKey -private byte[] endKey +private byte[] endKey Deprecated. @@ -966,7 +966,7 @@ implements offLine -private boolean offLine +private boolean offLine Deprecated. @@ -976,7 +976,7 @@ implements regionId -private long regionId +private long regionId Deprecated. @@ -986,7 +986,7 @@ implements regionName -private transient byte[] regionName +private transient byte[] regionName Deprecated. @@ -996,7 +996,7 @@ implements split -private boolean split +private boolean split Deprecated. @@ -1006,7 +1006,7 @@ implements startKey -private byte[] startKey +private byte[] startKey Deprecated. @@ -1016,7 +1016,7 @@ implements hashCode -private int hashCode +private int hashCode Deprecated. @@ -1026,7 +1026,7 @@ implements NO_HASH -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String NO_HASH +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String NO_HASH Deprecated. @@ -1036,7 +1036,7 @@ implements encodedName -private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String encodedName +private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String encodedName Deprecated. @@ -1046,7 +1046,7 @@ implements encodedNameAsBytes -private byte[] encodedNameAsBytes +private byte[] encodedNameAsBytes Deprecated. 
@@ -1056,7 +1056,7 @@ implements replicaId -private int replicaId +private int replicaId Deprecated. @@ -1066,7 +1066,7 @@ implements tableName -private TableName tableName +private TableName tableName Deprecated. @@ -1076,7 +1076,7 @@ implements DISPLAY_KEYS_KEY -static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String DISPLAY_KEYS_KEY +static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String DISPLAY_KEYS_KEY Deprecated. See Also: @@ -1090,7 +1090,7 @@ implements HIDDEN_END_KEY -public static final byte[] HIDDEN_END_KEY +public static final byte[] HIDDEN_END_KEY Deprecated. @@ -1100,7 +1100,7 @@ implements HIDDEN_START_KEY -public static final byte[] HIDDEN_START_KEY +public static final byte[] HIDDEN_START_KEY Deprecated. @@ -1110,7 +1110,7 @@ implements FIRST_META_REGIONINFO -public static final HRegionInfo FIRST_META_REGIONINFO +public static final HRegionInfo FIRST_META_REGIONINFO Deprecated. HRegionInfo for first meta region @@ -1129,7 +1129,7 @@ implements HRegionInfo -private HR
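Everything on this page is @Deprecated; in the 2.x client API new code builds region metadata through the RegionInfo interface and RegionInfoBuilder instead. The builder usage below is an assumption drawn from the 2.0 client API, not from this diff:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionInfoExample {
  public static void main(String[] args) {
    // Instead of new HRegionInfo(table, startKey, endKey), build an immutable RegionInfo.
    RegionInfo region = RegionInfoBuilder.newBuilder(TableName.valueOf("demo"))
        .setStartKey(Bytes.toBytes("aaa"))
        .setEndKey(Bytes.toBytes("mmm"))
        .setSplit(false)
        .build();
    System.out.println(region.getRegionNameAsString());
    System.out.println(region.getEncodedName());
  }
}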
[05/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html index 897cf16..8731466 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html @@ -217,7 +217,7 @@ implements hasAnyReplicaGets -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -533,7 +533,7 @@ implements LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html index 5e9367f..2e49153 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html @@ -151,7 +151,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? future -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -282,7 +282,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.html index 09f4f1a..29068cd 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -class AsyncScanSingleRegionRpcRetryingCaller +class AsyncScanSingleRegionRpcRetryingCaller extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object Retry caller for scanning a region. @@ -194,7 +194,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? loc -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -415,7 +415,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/AsyncTableResultScanner.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncTableResultScanner.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableResultScanner.html index fa53027..80b31e3 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncTableResultScanner.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableResultScanner.html @@ -166,7 +166,7 @@ implements error -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -341,7 +341,7 @@ implements LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/BatchErrors.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/BatchErrors.html b/devapidocs/org/apache/hadoop/hbase/client/BatchErrors.html index 2ed1bab..947c2bc 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/BatchErrors.html +++ b/devapidocs/org/apache/hadoop/hbase/client/BatchErrors.html @@ -138,7 +138,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? addresses -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -227,7 +227,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/clie
[41/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html index 7ab77ff..3a23125 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html @@ -43,634 +43,633 @@ 035import java.util.concurrent.atomic.AtomicInteger; 036import java.util.concurrent.atomic.AtomicLong; 037 -038import org.apache.commons.logging.Log; -039import org.apache.commons.logging.LogFactory; -040import org.apache.hadoop.conf.Configuration; -041import org.apache.hadoop.hbase.HBaseConfiguration; -042import org.apache.hadoop.hbase.HConstants; -043import org.apache.hadoop.hbase.HRegionLocation; -044import org.apache.hadoop.hbase.ServerName; -045import org.apache.hadoop.hbase.TableName; -046import org.apache.hadoop.hbase.ipc.RpcControllerFactory; -047import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -048import org.apache.yetus.audience.InterfaceAudience; -049 -050import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -051import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; -052 -053/** -054 * HTableMultiplexer provides a thread-safe non blocking PUT API across all the tables. -055 * Each put will be sharded into different buffer queues based on its destination region server. -056 * So each region server buffer queue will only have the puts which share the same destination. -057 * And each queue will have a flush worker thread to flush the puts request to the region server. -058 * If any queue is full, the HTableMultiplexer starts to drop the Put requests for that -059 * particular queue. -060 * -061 * Also all the puts will be retried as a configuration number before dropping. -062 * And the HTableMultiplexer can report the number of buffered requests and the number of the -063 * failed (dropped) requests in total or on per region server basis. -064 * -065 * This class is thread safe. -066 */ -067@InterfaceAudience.Public -068public class HTableMultiplexer { -069 private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName()); -070 -071 public static final String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS = -072 "hbase.tablemultiplexer.flush.period.ms"; -073 public static final String TABLE_MULTIPLEXER_INIT_THREADS = "hbase.tablemultiplexer.init.threads"; -074 public static final String TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE = -075 "hbase.client.max.retries.in.queue"; -076 -077 /** The map between each region server to its flush worker */ -078 private final MapserverToFlushWorkerMap = -079 new ConcurrentHashMap<>(); -080 -081 private final Configuration workerConf; -082 private final ClusterConnection conn; -083 private final ExecutorService pool; -084 private final int maxAttempts; -085 private final int perRegionServerBufferQueueSize; -086 private final int maxKeyValueSize; -087 private final ScheduledExecutorService executor; -088 private final long flushPeriod; -089 -090 /** -091 * @param conf The HBaseConfiguration -092 * @param perRegionServerBufferQueueSize determines the max number of the buffered Put ops for -093 * each region server before dropping the request. 
-094 */ -095 public HTableMultiplexer(Configuration conf, int perRegionServerBufferQueueSize) -096 throws IOException { -097 this(ConnectionFactory.createConnection(conf), conf, perRegionServerBufferQueueSize); -098 } -099 -100 /** -101 * @param conn The HBase connection. -102 * @param conf The HBase configuration -103 * @param perRegionServerBufferQueueSize determines the max number of the buffered Put ops for -104 * each region server before dropping the request. -105 */ -106 public HTableMultiplexer(Connection conn, Configuration conf, -107 int perRegionServerBufferQueueSize) { -108this.conn = (ClusterConnection) conn; -109this.pool = HTable.getDefaultExecutor(conf); -110// how many times we could try in total, one more than retry number -111this.maxAttempts = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, -112 HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) + 1; -113this.perRegionServerBufferQueueSize = perRegionServerBufferQueueSize; -114this.maxKeyValueSize = HTable.getMaxKeyValueSize(conf); -115this.flushPeriod = conf.getLong(TABLE_MULTIPLEXER_FLUSH_PERIOD_MS, 100); -116int initThreads = conf.getInt(TABLE_MULTIPLEXER_INIT_THREADS, 10); -117t
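The class javadoc above describes a non-blocking put path that buffers per destination regionserver and drops puts when a queue fills. A minimal usage sketch along those lines; the put(TableName, Put) call and the status accessor are assumed from the public HTableMultiplexer API, and the table name and queue size are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTableMultiplexer;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiplexerExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Second argument is the per-regionserver buffer queue size described in the javadoc above.
    HTableMultiplexer multiplexer = new HTableMultiplexer(conf, 10_000);

    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));

    // Non-blocking: returns false (instead of blocking) when the destination queue is full.
    boolean accepted = multiplexer.put(TableName.valueOf("demo"), put);
    System.out.println("accepted=" + accepted
        + " buffered=" + multiplexer.getHTableMultiplexerStatus().getTotalBufferedCounter());

    multiplexer.close();
  }
}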
[34/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html b/apidocs/src-html/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html index 08be3b2..77665c7 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html @@ -27,80 +27,81 @@ 019package org.apache.hadoop.hbase.mapred; 020 021import java.io.IOException; -022 -023import org.apache.commons.logging.Log; -024import org.apache.commons.logging.LogFactory; -025import org.apache.hadoop.hbase.HBaseConfiguration; -026import org.apache.hadoop.hbase.TableName; -027import org.apache.yetus.audience.InterfaceAudience; -028import org.apache.hadoop.hbase.client.Connection; -029import org.apache.hadoop.hbase.client.ConnectionFactory; -030import org.apache.hadoop.hbase.client.RegionLocator; -031import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -032import org.apache.hadoop.hbase.util.Bytes; -033import org.apache.hadoop.mapred.JobConf; -034import org.apache.hadoop.mapred.Partitioner; -035 -036/** -037 * This is used to partition the output keys into groups of keys. -038 * Keys are grouped according to the regions that currently exist -039 * so that each reducer fills a single region so load is distributed. -040 * -041 * @param-042 * @param -043 */ -044@InterfaceAudience.Public -045public class HRegionPartitioner -046implements Partitioner { -047 private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class); -048 // Connection and locator are not cleaned up; they just die when partitioner is done. -049 private Connection connection; -050 private RegionLocator locator; -051 private byte[][] startKeys; -052 +022import org.apache.hadoop.hbase.HBaseConfiguration; +023import org.apache.hadoop.hbase.TableName; +024import org.apache.yetus.audience.InterfaceAudience; +025import org.slf4j.Logger; +026import org.slf4j.LoggerFactory; +027import org.apache.hadoop.hbase.client.Connection; +028import org.apache.hadoop.hbase.client.ConnectionFactory; +029import org.apache.hadoop.hbase.client.RegionLocator; +030import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +031import org.apache.hadoop.hbase.util.Bytes; +032import org.apache.hadoop.mapred.JobConf; +033import org.apache.hadoop.mapred.Partitioner; +034 +035/** +036 * This is used to partition the output keys into groups of keys. +037 * Keys are grouped according to the regions that currently exist +038 * so that each reducer fills a single region so load is distributed. +039 * +040 * @param +041 * @param +042 */ +043@InterfaceAudience.Public +044public class HRegionPartitioner +045implements Partitioner { +046 private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class); +047 // Connection and locator are not cleaned up; they just die when partitioner is done. 
+048 private Connection connection; +049 private RegionLocator locator; +050 private byte[][] startKeys; +051 +052 @Override 053 public void configure(JobConf job) { 054try { 055 this.connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job)); 056 TableName tableName = TableName.valueOf(job.get(TableOutputFormat.OUTPUT_TABLE)); 057 this.locator = this.connection.getRegionLocator(tableName); 058} catch (IOException e) { -059 LOG.error(e); +059 LOG.error(e.toString(), e); 060} 061 062try { 063 this.startKeys = this.locator.getStartKeys(); 064} catch (IOException e) { -065 LOG.error(e); +065 LOG.error(e.toString(), e); 066} 067 } 068 -069 public int getPartition(ImmutableBytesWritable key, V2 value, int numPartitions) { -070byte[] region = null; -071// Only one region return 0 -072if (this.startKeys.length == 1){ -073 return 0; -074} -075try { -076 // Not sure if this is cached after a split so we could have problems -077 // here if a region splits while mapping -078 region = locator.getRegionLocation(key.get()).getRegionInfo().getStartKey(); -079} catch (IOException e) { -080 LOG.error(e); -081} -082for (int i = 0; i < this.startKeys.length; i++){ -083 if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){ -084if (i >= numPartitions-1){ -085 // cover if we have less reduces then regions. -086 return (Integer.toString(i).hashCode() -087 & Integer.MAX_VALUE) % numPartitions; -088} -089return i; -090 } -091} -092// if above fails to find start key that match we need to return something -093return 0; -094 } -095} +069
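Besides the import swap, this diff rewrites LOG.error(e) as LOG.error(e.toString(), e). That follows from the API difference: commons-logging's Log accepts a bare Object/Throwable, while slf4j's Logger wants a message plus an optional Throwable. A small sketch of the call-site pattern (the surrounding try/catch is invented):

import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingCallSiteExample {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingCallSiteExample.class);

  public static void main(String[] args) {
    try {
      throw new IOException("simulated failure");
    } catch (IOException e) {
      // commons-logging allowed LOG.error(e); slf4j has no error(Throwable) overload,
      // so the message and the throwable (which carries the stack trace) are passed separately.
      LOG.error(e.toString(), e);
    }
  }
}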
[30/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html index 3445d05..b85b00e 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html @@ -31,996 +31,997 @@ 023import com.google.protobuf.Message; 024import com.google.protobuf.Service; 025import com.google.protobuf.ServiceException; -026import org.apache.commons.logging.Log; -027import org.apache.commons.logging.LogFactory; -028import org.apache.hadoop.conf.Configuration; -029import org.apache.hadoop.hbase.Cell; -030import org.apache.hadoop.hbase.CellUtil; -031import org.apache.hadoop.hbase.CompareOperator; -032import org.apache.hadoop.hbase.HBaseConfiguration; -033import org.apache.hadoop.hbase.HConstants; -034import org.apache.hadoop.hbase.HTableDescriptor; -035import org.apache.hadoop.hbase.KeyValue; -036import org.apache.hadoop.hbase.TableName; -037import org.apache.yetus.audience.InterfaceAudience; -038import org.apache.hadoop.hbase.client.Append; -039import org.apache.hadoop.hbase.client.Delete; -040import org.apache.hadoop.hbase.client.Durability; -041import org.apache.hadoop.hbase.client.Get; -042import org.apache.hadoop.hbase.client.Increment; -043import org.apache.hadoop.hbase.client.Put; -044import org.apache.hadoop.hbase.client.Result; -045import org.apache.hadoop.hbase.client.ResultScanner; -046import org.apache.hadoop.hbase.client.Row; -047import org.apache.hadoop.hbase.client.RowMutations; -048import org.apache.hadoop.hbase.client.Scan; -049import org.apache.hadoop.hbase.client.Table; -050import org.apache.hadoop.hbase.client.TableDescriptor; -051import org.apache.hadoop.hbase.client.coprocessor.Batch; -052import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; -053import org.apache.hadoop.hbase.client.metrics.ScanMetrics; -054import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -055import org.apache.hadoop.hbase.io.TimeRange; -056import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; -057import org.apache.hadoop.hbase.rest.Constants; -058import org.apache.hadoop.hbase.rest.model.CellModel; -059import org.apache.hadoop.hbase.rest.model.CellSetModel; -060import org.apache.hadoop.hbase.rest.model.RowModel; -061import org.apache.hadoop.hbase.rest.model.ScannerModel; -062import org.apache.hadoop.hbase.rest.model.TableSchemaModel; -063import org.apache.hadoop.hbase.util.Bytes; -064import org.apache.hadoop.util.StringUtils; -065 -066import java.io.IOException; -067import java.io.InterruptedIOException; -068import java.io.UnsupportedEncodingException; -069import java.net.URLEncoder; -070import java.util.ArrayList; -071import java.util.Collection; -072import java.util.Iterator; -073import java.util.List; -074import java.util.Map; -075import java.util.Set; -076import java.util.TreeMap; -077import java.util.concurrent.TimeUnit; -078 -079import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; -080 -081/** -082 * HTable interface to remote tables accessed via REST gateway -083 */ -084@InterfaceAudience.Public -085public class RemoteHTable implements Table { -086 -087 private static final Log LOG = LogFactory.getLog(RemoteHTable.class); -088 -089 final Client client; -090 final Configuration conf; -091 final byte[] name; -092 final int maxRetries; -093 final 
long sleepTime; -094 -095 @SuppressWarnings("rawtypes") -096 protected String buildRowSpec(final byte[] row, final Map familyMap, -097 final long startTime, final long endTime, final int maxVersions) { -098StringBuffer sb = new StringBuffer(); -099sb.append('/'); -100sb.append(Bytes.toString(name)); -101sb.append('/'); -102sb.append(toURLEncodedBytes(row)); -103Set families = familyMap.entrySet(); -104if (families != null) { -105 Iterator i = familyMap.entrySet().iterator(); -106 sb.append('/'); -107 while (i.hasNext()) { -108Map.Entry e = (Map.Entry)i.next(); -109Collection quals = (Collection)e.getValue(); -110if (quals == null || quals.isEmpty()) { -111 // this is an unqualified family. append the family name and NO ':' -112 sb.append(toURLEncodedBytes((byte[])e.getKey())); -113} else { -114 Iterator ii = quals.iterator(); -115 while (ii.hasNext()) { -116 sb.append(toURLEncodedBytes((byte[])e.getKey())); -117sb.append(':'); -118Object o = ii.next(); -119// Puts use byte[] but Deletes use KeyValue -120if (o instanceof byte[]) { -121 sb.append(toURLEncodedBytes((byte[])o)); -122} else if (
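RemoteHTable implements the Table interface on top of the REST gateway, per the javadoc above. A hedged sketch of pointing it at a gateway and issuing a Get; the host, port, and table name are placeholders, and Client/Cluster are the rest.client companion classes this class is assumed to be constructed with:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

public class RemoteHTableExample {
  public static void main(String[] args) throws Exception {
    // The REST gateway (hbase rest start) listens on port 8080 by default.
    Cluster cluster = new Cluster();
    cluster.add("rest-gateway.example.com", 8080);
    Client client = new Client(cluster);

    RemoteHTable table = new RemoteHTable(client, "demo");
    try {
      Result result = table.get(new Get(Bytes.toBytes("row1")));
      System.out.println("cells: " + result.size());
    } finally {
      table.close();
    }
  }
}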
[25/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html index 70481ce..b9f6622 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html @@ -47,1648 +47,1649 @@ 039import java.util.zip.GZIPInputStream; 040import java.util.zip.GZIPOutputStream; 041 -042import org.apache.commons.logging.Log; -043import org.apache.commons.logging.LogFactory; -044import org.apache.yetus.audience.InterfaceAudience; -045 -046/** -047 * Encodes and decodes to and from Base64 notation. -048 * -049 *-050 * Homepage: http://iharder.net/base64;. -051 *
 * Change Log:
 *
 *   v2.2.1 - Fixed bug using URL_SAFE and ORDERED encodings. Fixed bug when
 *     using very small files (~< 40 bytes).
 *   v2.2 - Added some helper methods for encoding/decoding directly from one
 *     file to the next. Also added a main() method to support command line
 *     encoding/decoding from one file to the next. Also added these Base64
 *     dialects:
 *       - The default is RFC3548 format.
 *       - Using Base64.URLSAFE generates URL and file name friendly format as
 *         described in Section 4 of RFC3548.
 *         http://www.faqs.org/rfcs/rfc3548.html
 *       - Using Base64.ORDERED generates URL and file name friendly format
 *         that preserves lexical ordering as described in
 *         http://www.faqs.org/qa/rfcc-1940.html
 *     Special thanks to Jim Kellerman at http://www.powerset.com/ for
 *     contributing the new Base64 dialects.
 *   v2.1 - Cleaned up javadoc comments and unused variables and methods. Added
 *     some convenience methods for reading and writing to and from files.
 *   v2.0.2 - Now specifies UTF-8 encoding in places where the code fails on
 *     systems with other encodings (like EBCDIC).
 *   v2.0.1 - Fixed an error when decoding a single byte, that is, when the
 *     encoded data was a single byte.
 *   v2.0 - I got rid of methods that used booleans to set options. Now
 *     everything is more consolidated and cleaner. The code now detects when
 *     data that's being decoded is gzip-compressed and will decompress it
 *     automatically. Generally things are cleaner. You'll probably have to
 *     change some method calls that you were making to support the new options
 *     format (ints that you "OR" together).
 *   v1.5.1 - Fixed bug when decompressing and decoding to a byte[] using
 *     decode( String s, boolean gzipCompressed ). Added the ability to
 *     "suspend" encoding in the Output Stream so you can turn on and off the
 *     encoding if you need to embed base64 data in an otherwise "normal" stream
 *     (like an XML file).
 *   v1.5 - Output stream pases on flush() command but doesn't do anything
 *     itself. This helps when using GZIP streams. Added the ability to
 *     GZip-compress objects before encoding them.
 *   v1.4 - Added helper methods to read/write files.
 *   v1.3.6 - Fixed OutputStream.flush() so that 'position' is reset.
 *   v1.3.5 - Added flag to turn on and off line breaks. Fixed bug in input
 *     stream where last buffer being read, if not completely full, was not
 *     returned.
 *   v1.3.4 - Fixed when "improperly padded stream" error was thrown at the
 *     wrong time.
 *   v1.3.3 - Fixed I/O streams which were totally messed up.
 *
 * I am placing this code in the Public Domain. Do with it as you will. This
 * software comes with no guarantees or warranties but with plenty of
 * well-wishing instead!
 *
 * Please visit http://iharder.net/base64 periodically to check for updates or
 * to contribute improvements.
 *
 * author: Robert Harder, r...@iharder.net
-118 * version: 2.2.1 -119 */ -120@InterfaceAudience.Public -121public class Base64 { -122 -123 /* P U B L I C F I E L D S */ -124 -125 /** No options specified. Value is zero. */ -126 public final static int NO_OPTIONS = 0; -127 -128 /** Sp
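Only the logging imports change in this file; the Base64 utility itself (a bundled copy of Robert Harder's encoder, per the changelog above) keeps its small static API. A usage sketch, assuming the encodeBytes/decode entry points HBase normally calls:

import java.util.Arrays;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

public class Base64Example {
  public static void main(String[] args) {
    byte[] original = Bytes.toBytes("row-key-\u0001-with-binary");
    // Encode to a String that is safe to embed in XML/JSON payloads.
    String encoded = Base64.encodeBytes(original);
    byte[] decoded = Base64.decode(encoded);
    System.out.println(encoded);
    System.out.println("round-trip ok: " + Arrays.equals(original, decoded));
  }
}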
[48/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html -- diff --git a/apidocs/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html b/apidocs/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html index f543930..ae8064d 100644 --- a/apidocs/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html +++ b/apidocs/org/apache/hadoop/hbase/mapred/HRegionPartitioner.html @@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class HRegionPartitioner+public class HRegionPartitioner extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements org.apache.hadoop.mapred.Partitioner This is used to partition the output keys into groups of keys. @@ -198,7 +198,7 @@ implements org.apache.hadoop.mapred.Partitioner< HRegionPartitioner -public HRegionPartitioner() +public HRegionPartitioner() @@ -230,7 +230,7 @@ implements org.apache.hadoop.mapred.Partitioner< getPartition -public int getPartition(ImmutableBytesWritable key, +public int getPartition(ImmutableBytesWritable key, V2 value, int numPartitions) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html -- diff --git a/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html b/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html index 9efff38..937d7c5 100644 --- a/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html +++ b/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class TableRecordReaderImpl +public class TableRecordReaderImpl extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object Iterate over an HBase table data, return (Text, RowResult) pairs @@ -233,7 +233,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? TableRecordReaderImpl -public TableRecordReaderImpl() +public TableRecordReaderImpl() @@ -250,7 +250,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? restart -public void restart(byte[] firstRow) +public void restart(byte[] firstRow) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Restart from survivable exceptions by creating a new scanner. @@ -267,7 +267,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? init -public void init() +public void init() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Build the scanner. Not done in constructor to allow for extension. @@ -282,7 +282,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? setHTable -public void setHTable(Table htable) +public void setHTable(Table htable) Parameters: htable - the HTableDescriptor to scan. @@ -295,7 +295,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? setInputColumns -public void setInputColumns(byte[][] inputColumns) +public void setInputColumns(byte[][] inputColumns) Parameters: inputColumns - the columns to be placed in Result. @@ -308,7 +308,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
setStartRow -public void setStartRow(byte[] startRow) +public void setStartRow(byte[] startRow) Parameters: startRow - the first row in the split @@ -321,7 +321,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? setEndRow -public void setEndRow(byte[] endRow) +public void setEndRow(byte[] endRow) Parameters: endRow - the last row in the split @@ -334,7 +334,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? setRowFilter -public void setRowFilter(Filter rowFilter) +public void setRowFilter(Filter rowFilter) Parameters: rowFilter - the Filter to be used. @@ -347,7 +347,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? close -public void close() +public void close() @@ -356,7 +356,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? createKey -public ImmutableBytesWritable createKey() +public ImmutableBytesWritable createKey() Returns: ImmutableBytesWritable @@ -371,7 +371,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? createValue -public Result creat
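The setters and lifecycle methods listed above imply a fixed wiring order for this old-style mapred record reader. A rough sketch of driving it directly; the table, column, and row-range values are placeholders, and the next(key, value) loop is assumed from the usual mapred RecordReader contract rather than shown on this page:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableRecordReaderImpl;
import org.apache.hadoop.hbase.util.Bytes;

public class TableRecordReaderImplSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("demo"))) {
      TableRecordReaderImpl reader = new TableRecordReaderImpl();
      // Configure before init(), per the method list above.
      reader.setHTable(table);
      reader.setInputColumns(new byte[][] { Bytes.toBytes("cf:q") });
      reader.setStartRow(Bytes.toBytes("a"));
      reader.setEndRow(Bytes.toBytes("m"));
      reader.init(); // builds the scanner
      ImmutableBytesWritable key = reader.createKey();
      Result value = reader.createValue();
      while (reader.next(key, value)) {
        System.out.println(Bytes.toString(key.get()));
      }
      reader.close();
    }
  }
}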
[35/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html b/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html index 3ce8f8a..9b96088 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html @@ -35,557 +35,558 @@ 027import java.util.Arrays; 028import java.util.Map; 029import java.util.concurrent.ConcurrentHashMap; -030import javax.crypto.SecretKeyFactory; -031import javax.crypto.spec.PBEKeySpec; -032import javax.crypto.spec.SecretKeySpec; -033 -034import org.apache.commons.io.IOUtils; -035import org.apache.commons.logging.Log; -036import org.apache.commons.logging.LogFactory; -037import org.apache.hadoop.conf.Configuration; -038import org.apache.hadoop.hbase.HBaseConfiguration; -039import org.apache.hadoop.hbase.HConstants; -040import org.apache.hadoop.hbase.util.Bytes; -041import org.apache.hadoop.hbase.util.Pair; -042import org.apache.hadoop.util.ReflectionUtils; -043import org.apache.yetus.audience.InterfaceAudience; -044 -045/** -046 * A facade for encryption algorithms and related support. -047 */ -048@InterfaceAudience.Public -049public final class Encryption { -050 -051 private static final Log LOG = LogFactory.getLog(Encryption.class); -052 -053 /** -054 * Crypto context -055 */ -056 @InterfaceAudience.Public -057 public static class Context extends org.apache.hadoop.hbase.io.crypto.Context { -058 -059/** The null crypto context */ -060public static final Context NONE = new Context(); -061 -062private Context() { -063 super(); -064} -065 -066private Context(Configuration conf) { -067 super(conf); -068} -069 -070@Override -071public Context setCipher(Cipher cipher) { -072 super.setCipher(cipher); -073 return this; -074} -075 -076@Override -077public Context setKey(Key key) { -078 super.setKey(key); -079 return this; -080} -081 -082public Context setKey(byte[] key) { -083 super.setKey(new SecretKeySpec(key, getCipher().getName())); -084 return this; -085} -086 } -087 -088 public static Context newContext() { -089return new Context(); -090 } -091 -092 public static Context newContext(Configuration conf) { -093return new Context(conf); -094 } -095 -096 // Prevent instantiation -097 private Encryption() { -098super(); -099 } -100 -101 /** -102 * Get an cipher given a name -103 * @param name the cipher name -104 * @return the cipher, or null if a suitable one could not be found -105 */ -106 public static Cipher getCipher(Configuration conf, String name) { -107return getCipherProvider(conf).getCipher(name); -108 } -109 -110 /** -111 * Get names of supported encryption algorithms -112 * -113 * @return Array of strings, each represents a supported encryption algorithm -114 */ -115 public static String[] getSupportedCiphers() { -116return getSupportedCiphers(HBaseConfiguration.create()); -117 } -118 -119 /** -120 * Get names of supported encryption algorithms -121 * -122 * @return Array of strings, each represents a supported encryption algorithm -123 */ -124 public static String[] getSupportedCiphers(Configuration conf) { -125return getCipherProvider(conf).getSupportedCiphers(); -126 } -127 -128 /** -129 * Return the MD5 digest of the concatenation of the supplied arguments. -130 */ -131 public static byte[] hash128(String... 
args) { -132byte[] result = new byte[16]; -133try { -134 MessageDigest md = MessageDigest.getInstance("MD5"); -135 for (String arg: args) { -136md.update(Bytes.toBytes(arg)); -137 } -138 md.digest(result, 0, result.length); -139 return result; -140} catch (NoSuchAlgorithmException e) { -141 throw new RuntimeException(e); -142} catch (DigestException e) { -143 throw new RuntimeException(e); -144} -145 } -146 -147 /** -148 * Return the MD5 digest of the concatenation of the supplied arguments. -149 */ -150 public static byte[] hash128(byte[]... args) { -151byte[] result = new byte[16]; -152try { -153 MessageDigest md = MessageDigest.getInstance("MD5"); -154 for (byte[] arg: args) { -155md.update(arg); -156 } -157 md.digest(result, 0, result.length); -158 return result; -159} catch (NoSuchAlgorithmException e) { -160 throw new RuntimeException(e); -161} catch (DigestException e) { -162 throw new RuntimeException(e); -163} -164 } -165 -166 /** -167 * Return the SHA-256 digest of the concatenation of the supplied arguments. -168 */ -169 public static byte[] hash256(String... args) { -170byte[] result = ne
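The Encryption facade shown here wires a cipher and key into a reusable Context. A short sketch using only methods visible in this file; the passphrase-derived 16-byte key is a throwaway example, and "AES" must be supplied by the configured CipherProvider:

import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.crypto.Encryption;

public class EncryptionContextExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // List the algorithms the configured CipherProvider knows about.
    System.out.println(Arrays.toString(Encryption.getSupportedCiphers(conf)));

    // Build a crypto context: cipher by name, key as raw bytes. Per the source above,
    // setKey(byte[]) wraps the bytes in a SecretKeySpec for the cipher's algorithm,
    // so the cipher must be set first.
    byte[] key = Encryption.hash128("correct horse battery staple"); // 16-byte example key
    Encryption.Context context = Encryption.newContext(conf)
        .setCipher(Encryption.getCipher(conf, "AES"))
        .setKey(key);
    System.out.println("cipher: " + context.getCipher().getName());
  }
}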
[36/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.Context.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.Context.html b/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.Context.html index 3ce8f8a..9b96088 100644 (same generated source and same commons-logging to slf4j import change as Encryption.html above)
[32/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html index 342840a..4c42b96 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html @@ -27,224 +27,223 @@ 019package org.apache.hadoop.hbase.mapreduce; 020 021import java.io.IOException; -022 -023import org.apache.commons.logging.Log; -024import org.apache.commons.logging.LogFactory; -025import org.apache.yetus.audience.InterfaceAudience; -026import org.apache.hadoop.conf.Configurable; -027import org.apache.hadoop.conf.Configuration; -028import org.apache.hadoop.hbase.HBaseConfiguration; -029import org.apache.hadoop.hbase.HConstants; -030import org.apache.hadoop.hbase.TableName; -031import org.apache.hadoop.hbase.TableNotEnabledException; -032import org.apache.hadoop.hbase.TableNotFoundException; -033import org.apache.hadoop.hbase.client.Admin; -034import org.apache.hadoop.hbase.client.BufferedMutator; -035import org.apache.hadoop.hbase.client.Connection; -036import org.apache.hadoop.hbase.client.ConnectionFactory; -037import org.apache.hadoop.hbase.client.Delete; -038import org.apache.hadoop.hbase.client.Mutation; -039import org.apache.hadoop.hbase.client.Put; -040import org.apache.hadoop.mapreduce.JobContext; -041import org.apache.hadoop.mapreduce.OutputCommitter; -042import org.apache.hadoop.mapreduce.OutputFormat; -043import org.apache.hadoop.mapreduce.RecordWriter; -044import org.apache.hadoop.mapreduce.TaskAttemptContext; -045 -046/** -047 * Convert Map/Reduce output and write it to an HBase table. The KEY is ignored -048 * while the output value must be either a {@link Put} or a -049 * {@link Delete} instance. -050 */ -051@InterfaceAudience.Public -052public class TableOutputFormatextends OutputFormat -053implements Configurable { -054 -055 private static final Log LOG = LogFactory.getLog(TableOutputFormat.class); -056 -057 /** Job parameter that specifies the output table. */ -058 public static final String OUTPUT_TABLE = "hbase.mapred.outputtable"; -059 -060 /** -061 * Prefix for configuration property overrides to apply in {@link #setConf(Configuration)}. -062 * For keys matching this prefix, the prefix is stripped, and the value is set in the -063 * configuration with the resulting key, ie. the entry "hbase.mapred.output.key1 = value1" -064 * would be set in the configuration as "key1 = value1". Use this to set properties -065 * which should only be applied to the {@code TableOutputFormat} configuration and not the -066 * input configuration. -067 */ -068 public static final String OUTPUT_CONF_PREFIX = "hbase.mapred.output."; -069 -070 /** -071 * Optional job parameter to specify a peer cluster. -072 * Used specifying remote cluster when copying between hbase clusters (the -073 * source is picked up from hbase-site.xml
). -074 * @see TableMapReduceUtil#initTableReducerJob(String, Class, org.apache.hadoop.mapreduce.Job, Class, String, String, String) -075 */ -076 public static final String QUORUM_ADDRESS = OUTPUT_CONF_PREFIX + "quorum"; -077 -078 /** Optional job parameter to specify peer cluster's ZK client port */ -079 public static final String QUORUM_PORT = OUTPUT_CONF_PREFIX + "quorum.port"; -080 -081 /** Optional specification of the rs class name of the peer cluster */ -082 public static final String -083 REGION_SERVER_CLASS = OUTPUT_CONF_PREFIX + "rs.class"; -084 /** Optional specification of the rs impl name of the peer cluster */ -085 public static final String -086 REGION_SERVER_IMPL = OUTPUT_CONF_PREFIX + "rs.impl"; -087 -088 /** The configuration. */ -089 private Configuration conf = null; -090 -091 /** -092 * Writes the reducer output to an HBase table. -093 */ -094 protected class TableRecordWriter -095 extends RecordWriter{ -096 -097private Connection connection; -098private BufferedMutator mutator; -099 -100/** -101 * @throws IOException -102 * -103 */ -104public TableRecordWriter() throws IOException { -105 String tableName = conf.get(OUTPUT_TABLE); -106 this.connection = ConnectionFactory.createConnection(conf); -107 this.mutator = connection.getBufferedMutator(TableName.valueOf(tableName)); -108 LOG.info("Created table instance for " + tableName); -109} -110/** -111 * Closes the writer, in this case flush table commits. -112 * -113 * @param context The context. -114 * @throws IOException When closing the writer fails. -115 * @see RecordWriter#close(TaskAttemptC
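TableOutputFormat is driven entirely by Configuration keys: OUTPUT_TABLE names the target table, and anything under the hbase.mapred.output. prefix is stripped and applied only to the output connection. A minimal job-setup sketch; the table name and peer quorum string are hypothetical, and in practice TableMapReduceUtil.initTableReducerJob wires most of this up:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.mapreduce.Job;

public class TableOutputFormatSetup {
  public static Job createJob() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Table the reducer output is written into.
    conf.set(TableOutputFormat.OUTPUT_TABLE, "demo");
    // Keys under OUTPUT_CONF_PREFIX apply only to the output connection,
    // e.g. targeting a peer cluster's ZooKeeper quorum when copying between clusters.
    conf.set(TableOutputFormat.QUORUM_ADDRESS, "zk1.example.com:2181:/hbase");

    Job job = Job.getInstance(conf, "write-to-hbase");
    job.setOutputFormatClass(TableOutputFormat.class);
    // The key is ignored by TableOutputFormat; values must be Put or Delete instances.
    job.setOutputKeyClass(ImmutableBytesWritable.class);
    job.setOutputValueClass(Mutation.class);
    return job;
  }
}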
[23/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html index 51d92c2..86fc15e 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html @@ -44,2578 +44,2580 @@ 036import java.util.Iterator; 037import java.util.List; 038 -039import com.google.protobuf.ByteString; -040import org.apache.commons.logging.Log; -041import org.apache.commons.logging.LogFactory; -042import org.apache.hadoop.hbase.Cell; -043import org.apache.hadoop.hbase.CellComparator; -044import org.apache.hadoop.hbase.KeyValue; -045import org.apache.hadoop.io.RawComparator; -046import org.apache.hadoop.io.WritableComparator; -047import org.apache.hadoop.io.WritableUtils; -048import org.apache.yetus.audience.InterfaceAudience; -049import sun.misc.Unsafe; -050 -051import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -052import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +039import org.apache.hadoop.hbase.Cell; +040import org.apache.hadoop.hbase.CellComparator; +041import org.apache.hadoop.hbase.KeyValue; +042import org.apache.hadoop.io.RawComparator; +043import org.apache.hadoop.io.WritableComparator; +044import org.apache.hadoop.io.WritableUtils; +045import org.apache.yetus.audience.InterfaceAudience; +046import org.slf4j.Logger; +047import org.slf4j.LoggerFactory; +048 +049import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +050import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +051 +052import com.google.protobuf.ByteString; 053 -054/** -055 * Utility class that handles byte arrays, conversions to/from other types, -056 * comparisons, hash code generation, manufacturing keys for HashMaps or -057 * HashSets, and can be used as key in maps or trees. -058 */ -059@SuppressWarnings("restriction") -060@InterfaceAudience.Public -061@edu.umd.cs.findbugs.annotations.SuppressWarnings( -062 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -063justification="It has been like this forever") -064public class Bytes implements Comparable{ -065 -066 // Using the charset canonical name for String/byte[] conversions is much -067 // more efficient due to use of cached encoders/decoders. -068 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -069 -070 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -071 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -072 -073 private static final Log LOG = LogFactory.getLog(Bytes.class); +054import sun.misc.Unsafe; +055 +056/** +057 * Utility class that handles byte arrays, conversions to/from other types, +058 * comparisons, hash code generation, manufacturing keys for HashMaps or +059 * HashSets, and can be used as key in maps or trees. +060 */ +061@SuppressWarnings("restriction") +062@InterfaceAudience.Public +063@edu.umd.cs.findbugs.annotations.SuppressWarnings( +064 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", +065justification="It has been like this forever") +066public class Bytes implements Comparable { +067 +068 // Using the charset canonical name for String/byte[] conversions is much +069 // more efficient due to use of cached encoders/decoders. 
+070 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); +071 +072 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed +073 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; 074 -075 /** -076 * Size of boolean in bytes -077 */ -078 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -079 -080 /** -081 * Size of byte in bytes -082 */ -083 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -084 -085 /** -086 * Size of char in bytes -087 */ -088 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -089 -090 /** -091 * Size of double in bytes -092 */ -093 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -094 -095 /** -096 * Size of float in bytes -097 */ -098 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -099 -100 /** -101 * Size of int in bytes -102 */ -103 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -104 -105 /** -106 * Size of long in bytes -107 */ -108 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -109 -110 /** -111 * Size of short in bytes -112 */ -113 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -114 -115 /** -116 * Mask to apply to a long to reveal the lower int only. Use like
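Bytes is the conversion workhorse referenced throughout these pages. A tiny round-trip sketch of the static helpers (values are arbitrary):

import org.apache.hadoop.hbase.util.Bytes;

public class BytesRoundTrip {
  public static void main(String[] args) {
    byte[] asBytes = Bytes.toBytes(1234L);                              // 8 bytes, SIZEOF_LONG
    long back = Bytes.toLong(asBytes);                                  // 1234
    byte[] row = Bytes.add(Bytes.toBytes("user"), Bytes.toBytes(1234L)); // composite row key
    // Lexicographic, byte-by-byte comparison, the same ordering HBase uses for row keys.
    int cmp = Bytes.compareTo(Bytes.toBytes("a"), Bytes.toBytes("b"));   // negative
    System.out.println(back + " " + row.length + " " + cmp);
  }
}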
[49/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html b/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html index bfd3bd2..37adc4c 100644 --- a/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html +++ b/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class HTableMultiplexer +public class HTableMultiplexer extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object HTableMultiplexer provides a thread-safe non blocking PUT API across all the tables. Each put will be sharded into different buffer queues based on its destination region server. @@ -301,7 +301,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? TABLE_MULTIPLEXER_FLUSH_PERIOD_MS -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS See Also: Constant Field Values @@ -314,7 +314,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? TABLE_MULTIPLEXER_INIT_THREADS -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String TABLE_MULTIPLEXER_INIT_THREADS +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String TABLE_MULTIPLEXER_INIT_THREADS See Also: Constant Field Values @@ -327,7 +327,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE See Also: Constant Field Values @@ -348,7 +348,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? HTableMultiplexer -public HTableMultiplexer(org.apache.hadoop.conf.Configuration conf, +public HTableMultiplexer(org.apache.hadoop.conf.Configuration conf, int perRegionServerBufferQueueSize) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException @@ -367,7 +367,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? HTableMultiplexer -public HTableMultiplexer(Connection conn, +public HTableMultiplexer(Connection conn, org.apache.hadoop.conf.Configuration conf, int perRegionServerBufferQueueSize) @@ -393,7 +393,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? close -public void close() +public void close() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Closes the internal Connection. Does nothing if the Connection has already been closed. 
@@ -409,7 +409,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? put -public boolean put(TableName tableName, +public boolean put(TableName tableName, Put put) The put request will be buffered by its corresponding buffer queue. Return false if the queue is already full. @@ -428,7 +428,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? put -public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">Listput(TableName tableName, +public http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List put(TableName tableName, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List puts) The puts request will be buffered by their corresponding buffer queue. Return the list of puts which could not be queued. @@ -448,7 +448,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? put http://docs.oracle.com/javase/8/docs/api/java/lang/D
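[Editor's note] For context, a hedged usage sketch of the HTableMultiplexer API documented above: buffer a single put and handle the "queue full" case. The table and column names are hypothetical, and error handling is elided; this is an illustration, not the project's recommended pattern.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTableMultiplexer;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MultiplexerExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Second argument is the per-regionserver buffer queue size.
        HTableMultiplexer multiplexer = new HTableMultiplexer(conf, 1000);
        try {
          Put put = new Put(Bytes.toBytes("row-1"));
          put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
          // Non-blocking: returns false when the destination buffer queue is already full.
          boolean queued = multiplexer.put(TableName.valueOf("demo"), put);
          if (!queued) {
            // Caller decides whether to retry, back off, or fall back to a blocking Table.put().
            System.err.println("Buffer queue full, put not accepted");
          }
        } finally {
          multiplexer.close();
        }
      }
    }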
[44/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/overview-frame.html -- diff --git a/apidocs/overview-frame.html b/apidocs/overview-frame.html index e1e77e4..90f1106 100644 --- a/apidocs/overview-frame.html +++ b/apidocs/overview-frame.html @@ -42,6 +42,7 @@ org.apache.hadoop.hbase.io.hfile org.apache.hadoop.hbase.io.util org.apache.hadoop.hbase.ipc +org.apache.hadoop.hbase.log org.apache.hadoop.hbase.mapred org.apache.hadoop.hbase.mapreduce org.apache.hadoop.hbase.master http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/overview-summary.html -- diff --git a/apidocs/overview-summary.html b/apidocs/overview-summary.html index 11fa0cd..7eb2c2a 100644 --- a/apidocs/overview-summary.html +++ b/apidocs/overview-summary.html @@ -216,108 +216,112 @@ +org.apache.hadoop.hbase.log + + + org.apache.hadoop.hbase.mapred Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce";>MapReduce Input/OutputFormats, a table indexing MapReduce job, and utility methods. - + org.apache.hadoop.hbase.mapreduce Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce";>MapReduce Input/OutputFormats, a table indexing MapReduce job, and utility methods. - + org.apache.hadoop.hbase.master - + org.apache.hadoop.hbase.mob - + org.apache.hadoop.hbase.mob.compactions - + org.apache.hadoop.hbase.namespace - + org.apache.hadoop.hbase.net - + org.apache.hadoop.hbase.nio - + org.apache.hadoop.hbase.quotas - + org.apache.hadoop.hbase.quotas.policies - + org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.querymatcher - + org.apache.hadoop.hbase.regionserver.throttle - + org.apache.hadoop.hbase.regionserver.wal - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.rest HBase REST - + org.apache.hadoop.hbase.rest.client - + org.apache.hadoop.hbase.rsgroup - + org.apache.hadoop.hbase.security - + org.apache.hadoop.hbase.shaded.protobuf - + org.apache.hadoop.hbase.snapshot - + org.apache.hadoop.hbase.spark - + org.apache.hadoop.hbase.spark.example.hbasecontext - + org.apache.hadoop.hbase.types @@ -325,23 +329,23 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. extensible data type API. - + org.apache.hadoop.hbase.util - + org.apache.hadoop.hbase.util.hbck - + org.apache.hadoop.hbase.wal - + org.apache.hadoop.hbase.zookeeper - + org.apache.hbase.archetypes.exemplars.client This package provides fully-functional exemplar Java code demonstrating @@ -349,7 +353,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. archetype with hbase-client dependency. 
- + org.apache.hbase.archetypes.exemplars.shaded_client This package provides fully-functional exemplar Java code demonstrating http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/overview-tree.html -- diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html index 922b0a4..ba5703a 100644 --- a/apidocs/overview-tree.html +++ b/apidocs/overview-tree.html @@ -102,6 +102,7 @@ org.apache.hadoop.hbase.io.hfile, org.apache.hadoop.hbase.io.util, org.apache.hadoop.hbase.ipc, +org.apache.hadoop.hbase.log, org.apache.hadoop.hbase.mapred, org.apache.hadoop.hbase.mapreduce, org.apache.hadoop.hbase.master, http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/package-list -- diff --git a/apidocs/package-list b/apidocs/package-list index d07965e..96ced66 100644 --- a/apidocs/package-list +++ b/apidocs/package-list @@ -27,6 +27,7 @@ org.apache.hadoop.hbase.io.hadoopbackport org.apache.hadoop.hbase.io.hfile org.apache.hadoop.hbase.io.util org.apache.hadoop.hbase.ipc +org.apache.hadoop.hbase.log org.apache.hadoop.hbase.mapred org.apache.hadoop.hbase.mapreduce org.apache.hadoop.hbase.master http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/AuthUtil.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/AuthUtil.html b/apidocs/src-html/org/apache/hadoop/hbase/AuthUtil.html index 6708bca..f16f8df 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/AuthUtil.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/AuthUtil.html @@ -29,14 +29,14 @@ 021import java.io.IOException; 022import java.net.UnknownHostException; 023 -024import org.apache.commons.logging.Log; -025import org.apache.commons.logging.LogFactory; -026import org.apache.hadoop.conf.Configuration; -027import
[22/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html index 51d92c2..86fc15e 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html @@ -44,2578 +44,2580 @@ 036import java.util.Iterator; 037import java.util.List; 038 -039import com.google.protobuf.ByteString; -040import org.apache.commons.logging.Log; -041import org.apache.commons.logging.LogFactory; -042import org.apache.hadoop.hbase.Cell; -043import org.apache.hadoop.hbase.CellComparator; -044import org.apache.hadoop.hbase.KeyValue; -045import org.apache.hadoop.io.RawComparator; -046import org.apache.hadoop.io.WritableComparator; -047import org.apache.hadoop.io.WritableUtils; -048import org.apache.yetus.audience.InterfaceAudience; -049import sun.misc.Unsafe; -050 -051import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -052import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +039import org.apache.hadoop.hbase.Cell; +040import org.apache.hadoop.hbase.CellComparator; +041import org.apache.hadoop.hbase.KeyValue; +042import org.apache.hadoop.io.RawComparator; +043import org.apache.hadoop.io.WritableComparator; +044import org.apache.hadoop.io.WritableUtils; +045import org.apache.yetus.audience.InterfaceAudience; +046import org.slf4j.Logger; +047import org.slf4j.LoggerFactory; +048 +049import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +050import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +051 +052import com.google.protobuf.ByteString; 053 -054/** -055 * Utility class that handles byte arrays, conversions to/from other types, -056 * comparisons, hash code generation, manufacturing keys for HashMaps or -057 * HashSets, and can be used as key in maps or trees. -058 */ -059@SuppressWarnings("restriction") -060@InterfaceAudience.Public -061@edu.umd.cs.findbugs.annotations.SuppressWarnings( -062 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -063justification="It has been like this forever") -064public class Bytes implements Comparable{ -065 -066 // Using the charset canonical name for String/byte[] conversions is much -067 // more efficient due to use of cached encoders/decoders. -068 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -069 -070 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -071 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -072 -073 private static final Log LOG = LogFactory.getLog(Bytes.class); +054import sun.misc.Unsafe; +055 +056/** +057 * Utility class that handles byte arrays, conversions to/from other types, +058 * comparisons, hash code generation, manufacturing keys for HashMaps or +059 * HashSets, and can be used as key in maps or trees. +060 */ +061@SuppressWarnings("restriction") +062@InterfaceAudience.Public +063@edu.umd.cs.findbugs.annotations.SuppressWarnings( +064 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", +065justification="It has been like this forever") +066public class Bytes implements Comparable { +067 +068 // Using the charset canonical name for String/byte[] conversions is much +069 // more efficient due to use of cached encoders/decoders. 
+070 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); +071 +072 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed +073 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; 074 -075 /** -076 * Size of boolean in bytes -077 */ -078 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -079 -080 /** -081 * Size of byte in bytes -082 */ -083 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -084 -085 /** -086 * Size of char in bytes -087 */ -088 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -089 -090 /** -091 * Size of double in bytes -092 */ -093 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -094 -095 /** -096 * Size of float in bytes -097 */ -098 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -099 -100 /** -101 * Size of int in bytes -102 */ -103 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -104 -105 /** -106 * Size of long in bytes -107 */ -108 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -109 -110 /** -111 * Size of short in bytes -112 */ -113 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -114 -115 /** -116 * Mask to apply to a long to reveal the lower int only. Use like
[27/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html index 6c97088..f999cbd 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html @@ -42,679 +42,678 @@ 034 035import org.apache.commons.cli.CommandLine; 036import org.apache.commons.cli.Option; -037import org.apache.commons.logging.Log; -038import org.apache.commons.logging.LogFactory; -039import org.apache.hadoop.conf.Configuration; -040import org.apache.hadoop.fs.FileStatus; -041import org.apache.hadoop.fs.FileSystem; -042import org.apache.hadoop.fs.Path; -043import org.apache.hadoop.hbase.TableName; -044import org.apache.hadoop.hbase.client.RegionInfo; -045import org.apache.hadoop.hbase.client.SnapshotDescription; -046import org.apache.hadoop.hbase.io.HFileLink; -047import org.apache.hadoop.hbase.io.WALLink; -048import org.apache.hadoop.hbase.util.AbstractHBaseTool; -049import org.apache.hadoop.hbase.util.FSUtils; -050import org.apache.hadoop.util.StringUtils; -051import org.apache.yetus.audience.InterfaceAudience; -052 -053import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -054import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; -055import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; -056 -057/** -058 * Tool for dumping snapshot information. -059 *-060 *
-065 */ -066@InterfaceAudience.Public -067public final class SnapshotInfo extends AbstractHBaseTool { -068 private static final Log LOG = LogFactory.getLog(SnapshotInfo.class); -069 -070 static final class Options { -071static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to examine."); -072static final Option REMOTE_DIR = new Option(null, "remote-dir", true, -073"Root directory that contains the snapshots."); -074static final Option LIST_SNAPSHOTS = new Option(null, "list-snapshots", false, -075"List all the available snapshots and exit."); -076static final Option FILES = new Option(null, "files", false, "Files and logs list."); -077static final Option STATS = new Option(null, "stats", false, "Files and logs stats."); -078static final Option SCHEMA = new Option(null, "schema", false, -079"Describe the snapshotted table."); -080static final Option SIZE_IN_BYTES = new Option(null, "size-in-bytes", false, -081"Print the size of the files in bytes."); -082 } -083 -084 /** -085 * Statistics about the snapshot -086 *- Table Descriptor -061 *
- Snapshot creation time, type, format version, ... -062 *
- List of hfiles and wals -063 *
- Stats about hfiles and logs sizes, percentage of shared with the source table, ... -064 *
-087 *
-091 */ -092 public static class SnapshotStats { -093/** Information about the file referenced by the snapshot */ -094static class FileInfo { -095 private final boolean corrupted; -096 private final boolean inArchive; -097 private final long size; -098 -099 FileInfo(final boolean inArchive, final long size, final boolean corrupted) { -100this.corrupted = corrupted; -101this.inArchive = inArchive; -102this.size = size; -103 } -104 -105 /** @return true if the file is in the archive */ -106 public boolean inArchive() { -107return this.inArchive; -108 } -109 -110 /** @return true if the file is corrupted */ -111 public boolean isCorrupted() { -112return this.corrupted; -113 } -114 -115 /** @return true if the file is missing */ -116 public boolean isMissing() { -117return this.size < 0; -118 } -119 -120 /** @return the file size */ -121 public long getSize() { -122return this.size; -123 } -124 -125 String getStateToString() { -126if (isCorrupted()) return "CORRUPTED"; -127if (isMissing()) return "NOT FOUND"; -128if (inArchive()) return "archive"; -129return null; -130 } -131} -132 -133private AtomicInteger hfilesArchiveCount = new AtomicInteger(); -134private AtomicInteger hfilesCorrupted = new AtomicInteger(); -135private AtomicInteger hfilesMissing = new AtomicInteger(); -136private AtomicInteger hfilesCount = new AtomicInteger(); -137private AtomicInteger hfilesMobCount = new AtomicInteger(); -138private AtomicInteger logsMissing- How many store files and logs are in the archive -088 *
- How many store files and logs are shared with the table -089 *
- Total store files and logs size and shared amount -090 *
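[Editor's note] The Options block above maps to SnapshotInfo's command-line flags. A hedged sketch of driving the tool programmatically: the flag names come from the hunk, the snapshot name is hypothetical, and running it through ToolRunner assumes AbstractHBaseTool's usual Hadoop Tool contract and a public no-arg constructor.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.snapshot.SnapshotInfo;
    import org.apache.hadoop.util.ToolRunner;

    public class SnapshotInfoExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Roughly equivalent to:
        //   hbase org.apache.hadoop.hbase.snapshot.SnapshotInfo --snapshot my_snapshot --files --stats --schema
        int exit = ToolRunner.run(conf, new SnapshotInfo(),
            new String[] { "--snapshot", "my_snapshot", "--files", "--stats", "--schema" });
        System.exit(exit);
      }
    }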
[07/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html index 129390f..a21c2b6 100644 --- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html +++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public final class BackupSystemTable +public final class BackupSystemTable extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true"; title="class or interface in java.io">Closeable This class provides API to access backup system table @@ -234,7 +234,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h INCR_BACKUP_SET -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -953,7 +953,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -962,7 +962,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h tableName -private TableName tableName +private TableName tableName @@ -971,7 +971,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h SESSIONS_FAMILY -static final byte[] SESSIONS_FAMILY +static final byte[] SESSIONS_FAMILY Stores backup sessions (contexts) @@ -981,7 +981,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h META_FAMILY -static final byte[] META_FAMILY +static final byte[] META_FAMILY Stores other meta @@ -991,7 +991,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h BULK_LOAD_FAMILY -static final byte[] BULK_LOAD_FAMILY +static final byte[] BULK_LOAD_FAMILY @@ -1000,7 +1000,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h connection -private final Connection connection +private final Connection connection Connection to HBase cluster, shared among all instances @@ -1010,7 +1010,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h BACKUP_INFO_PREFIX -private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String BACKUP_INFO_PREFIX +private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String BACKUP_INFO_PREFIX See Also: Constant Field Values @@ -1023,7 +1023,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h START_CODE_ROW -private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String START_CODE_ROW +private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String START_CODE_ROW See Also: Constant Field Values @@ -1036,7 +1036,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h ACTIVE_SESSION_ROW -private static final byte[] ACTIVE_SESSION_ROW +private static final byte[] ACTIVE_SESSION_ROW @@ -1045,7 +1045,7 @@ implements 
http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h ACTIVE_SESSION_COL -private static final byte[] ACTIVE_SESSION_COL +private static final byte[] ACTIVE_SESSION_COL @@ -1054,7 +1054,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h ACTIVE_SESSION_YES -private static final byte[] ACTIVE_SESSION_YES +private static final byte[] ACTIVE_SESSION_YES @@ -1063,7 +1063,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h ACTIVE_SESSION_NO -private static final byte[] ACTIVE_SESSION_NO +private static final byte[] ACTIVE_SESSION_NO @@ -1072,7 +1072,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h INCR_BACKUP_SET -private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String INCR_BACKUP_SET +private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String INCR_BACKUP_SET See Also: Constant Field Values @@ -1085,7 +1085,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h TABLE_RS_LOG_MAP_PREFIX -private static final http://do
[02/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html index 2a67eed..317a230 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html +++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; http://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true"; title="class or interface in java.lang">@FunctionalInterface -private static interface RawAsyncHBaseAdmin.Converter+private static interface RawAsyncHBaseAdmin.Converter @@ -156,7 +156,7 @@ private static interface convert -D convert(S src) +D convert(S src) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Throws: http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html index 523e977..4278e36 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html +++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html @@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab"; -private class RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer +private class RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer extends RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer @@ -232,7 +232,7 @@ extends CreateNamespaceProcedureBiConsumer -CreateNamespaceProcedureBiConsumer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String namespaceName) +CreateNamespaceProcedureBiConsumer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String namespaceName) @@ -249,7 +249,7 @@ extends getOperationType -http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String getOperationType() +http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String getOperationType() Specified by: getOperationType in class RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html index 2b9d7d2..fc4ec22 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html +++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html @@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab"; -private class RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer +private class 
RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer extends RawAsyncHBaseAdmin.TableProcedureBiConsumer @@ -232,7 +232,7 @@ extends CreateTableProcedureBiConsumer -CreateTableProcedureBiConsumer(TableName tableName) +CreateTableProcedureBiConsumer(TableName tableName) @@ -249,7 +249,7 @@ extends getOperationType -http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String getOperationType() +http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String getOperationType() Specified by: getOperationType in class RawAsyncHBaseAdmin.TableProcedureBiConsumer http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColu
[09/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html index 2db8dcc..22a1749 100644 --- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html +++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class HFileArchiver +public class HFileArchiver extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object Utility class to handle the removal of HFiles (or the respective StoreFiles) for a HRegion from the FileSystem. The hfiles will be archived or deleted, depending on @@ -198,7 +198,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? FUNC_FILE_TO_PATH -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -366,7 +366,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -375,7 +375,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? SEPARATOR -private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String SEPARATOR +private static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String SEPARATOR See Also: Constant Field Values @@ -388,7 +388,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? DEFAULT_RETRIES_NUMBER -private static final int DEFAULT_RETRIES_NUMBER +private static final int DEFAULT_RETRIES_NUMBER Number of retries in case of fs operation failure See Also: @@ -402,7 +402,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? FUNC_FILE_TO_PATH -private static final org.apache.hadoop.hbase.shaded.com.google.common.base.FunctionFUNC_FILE_TO_PATH +private static final org.apache.hadoop.hbase.shaded.com.google.common.base.Function FUNC_FILE_TO_PATH @@ -419,7 +419,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? HFileArchiver -private HFileArchiver() +private HFileArchiver() @@ -436,7 +436,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? exists -public static boolean exists(org.apache.hadoop.conf.Configuration conf, +public static boolean exists(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.FileSystem fs, RegionInfo info) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException @@ -454,7 +454,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? archiveRegion -public static void archiveRegion(org.apache.hadoop.conf.Configuration conf, +public static void archiveRegion(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.FileSystem fs, RegionInfo info) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException @@ -476,7 +476,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
archiveRegion -public static boolean archiveRegion(org.apache.hadoop.fs.FileSystem fs, +public static boolean archiveRegion(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path rootdir, org.apache.hadoop.fs.Path tableDir, org.apache.hadoop.fs.Path regionDir) @@ -503,7 +503,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? archiveFamily -public static void archiveFamily(org.apache.hadoop.fs.FileSystem fs, +public static void archiveFamily(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.conf.Configuration conf, RegionInfo parent, org.apache.hadoop.fs.Path tableDir, @@ -529,7 +529,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? archiveFamilyByFamilyDir -public static void archiveFamilyByFamilyDir(org.apache.hadoop.fs.FileSystem fs, +public static void archiveFamilyByFamilyDir(org.apache.hadoop.fs.FileSyste
[12/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html index f8df828..568381d 100644 --- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html +++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html @@ -103,7 +103,7 @@ -public static interface MetaTableAccessor.CloseableVisitor +public static interface MetaTableAccessor.CloseableVisitor extends MetaTableAccessor.Visitor, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true"; title="class or interface in java.io">Closeable Implementations 'visit' a catalog table row but with close() at the end. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html index 1bad2b1..7cf80e2 100644 --- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html +++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab"; -static class MetaTableAccessor.CollectAllVisitor +static class MetaTableAccessor.CollectAllVisitor extends MetaTableAccessor.CollectingVisitorCollects all returned. @@ -214,7 +214,7 @@ extends CollectAllVisitor -CollectAllVisitor() +CollectAllVisitor() @@ -231,7 +231,7 @@ extends add -void add(Result r) +void add(Result r) Specified by: add in class MetaTableAccessor.CollectingVisitor http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html index 0bd778b..c9a475e 100644 --- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html +++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html @@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab"; -abstract static class MetaTableAccessor.CollectingVisitor +abstract static class MetaTableAccessor.CollectingVisitor extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements MetaTableAccessor.Visitor A MetaTableAccessor.Visitor that collects content out of passed Result. @@ -221,7 +221,7 @@ implements results -final http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List results +final http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List results @@ -238,7 +238,7 @@ implements CollectingVisitor -CollectingVisitor() +CollectingVisitor() @@ -255,7 +255,7 @@ implements visit -public boolean visit(Result r) +public boolean visit(Result r) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Description copied from interface: MetaTableAccessor.Visitor Visit the catalog table row. 
@@ -278,7 +278,7 @@ implements add -abstract void add(Result r) +abstract void add(Result r) @@ -287,7 +287,7 @@ implements getResults -http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List getResults() +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List getResults() Returns: Collected results; wait till visits complete to collect all http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html index 8d07ed9..aa0de43 100644 --- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html +++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html @@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab"; -p
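[Editor's note] The CollectingVisitor shown above is a small visitor-pattern helper: visit() is called once per catalog row, add() stashes whatever the subclass cares about, and getResults() is read after the scan completes. A self-contained analogy of the same pattern, using generic types rather than HBase's package-private classes:

    import java.util.ArrayList;
    import java.util.List;

    // Visitor over scanned rows; returning false stops the scan early.
    interface RowVisitor<R> {
      boolean visit(R row);
    }

    // Collects every visited row, mirroring CollectAllVisitor's behaviour.
    class CollectingRowVisitor<R> implements RowVisitor<R> {
      final List<R> results = new ArrayList<>();

      @Override
      public boolean visit(R row) {
        if (row != null) {
          results.add(row);
        }
        return true; // keep scanning
      }

      List<R> getResults() {
        return results; // read only after the scan has finished
      }
    }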
[17/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/dependency-convergence.html -- diff --git a/dependency-convergence.html b/dependency-convergence.html index ba40f22..546aa95 100644 --- a/dependency-convergence.html +++ b/dependency-convergence.html @@ -7,7 +7,7 @@ - + Apache HBase – Reactor Dependency Convergence @@ -293,7 +293,7 @@ 319 Number of unique artifacts (NOA): -348 +349 Number of version-conflicting artifacts (NOC): 20 @@ -557,6 +557,176 @@ org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT+- org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile| \- org.apache.hadoop:hadoop-hdfs:jar:2.7.4:compile| \- (commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)+- org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test| \- org.apache.hadoop:hadoop-hdfs:test-jar:tests:2.7.4:test| \- (commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)+- org.apache.hadoop:hadoop-client:jar:2.7.4:compile| +- org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile| | \- org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile| | \- org.apache.hadoop:hadoop-yarn-client:jar:2.7.4:compile| | 0; \- (commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)| \- org.apache.hadoop:hadoop-yarn-api:jar:2.7.4:compile| \- (commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)+- org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.7.4:compile| \- org.apache.hadoop:hadoop-yarn-common:jar:2.7.4:compile| \- (commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)+- org.apache.hadoop:hadoop-common:jar:2.7.4:compile| \- commons-lang:commons-lang:jar:2.6:compile\- org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test +- org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test | \- (commons-lang:commons-lang:jar:2.6:test - omitted for duplicate) \- org.apache.hadoop:had oop-yarn-server-tests:test-jar:tests:2.7.4:test +- org.apache.hadoop:hadoop-yarn-server-nodemanager:jar:2.7.4:test | \- (commons-lang:commons-lang:jar:2.6:test - omitted for duplicate) \- org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test \- (commons-lang:commons-lang:jar:2.6:test - omitted for duplicate) org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT\- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-lang:commons-lang:jar:2.6:compile +commons-logging:commons-logging + + + + + + +1.0.4 + + +org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT\- org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile \- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) +org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT\- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) +org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT\- org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile \- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) +org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT\- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) 
+org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT\- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) +org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT\- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) +org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT\- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) +org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT\- org.apache.hadoop:hadoop-common:jar:2.7.4:compile \- commons-httpclient:commons-httpclient:jar:3.1:compile \- (commons-logging:commons-logging:jar:1.0.4:compile - omitted for conflict with 1.1.3) +org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT\- org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile \- org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile \- org.apache
[14/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/JMXListener.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/JMXListener.html b/devapidocs/org/apache/hadoop/hbase/JMXListener.html index 9a0219c..62a1305 100644 --- a/devapidocs/org/apache/hadoop/hbase/JMXListener.html +++ b/devapidocs/org/apache/hadoop/hbase/JMXListener.html @@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab"; -public class JMXListener +public class JMXListener extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements MasterCoprocessor, RegionServerCoprocessor Pluggable JMX Agent for HBase(to fix the 2 random TCP ports issue @@ -172,7 +172,7 @@ implements -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -303,7 +303,7 @@ implements LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -312,7 +312,7 @@ implements RMI_REGISTRY_PORT_CONF_KEY -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String RMI_REGISTRY_PORT_CONF_KEY +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String RMI_REGISTRY_PORT_CONF_KEY See Also: Constant Field Values @@ -325,7 +325,7 @@ implements RMI_CONNECTOR_PORT_CONF_KEY -public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String RMI_CONNECTOR_PORT_CONF_KEY +public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true"; title="class or interface in java.lang">String RMI_CONNECTOR_PORT_CONF_KEY See Also: Constant Field Values @@ -338,7 +338,7 @@ implements defMasterRMIRegistryPort -public static final int defMasterRMIRegistryPort +public static final int defMasterRMIRegistryPort See Also: Constant Field Values @@ -351,7 +351,7 @@ implements defRegionserverRMIRegistryPort -public static final int defRegionserverRMIRegistryPort +public static final int defRegionserverRMIRegistryPort See Also: Constant Field Values @@ -364,7 +364,7 @@ implements JMX_CS -private static http://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXConnectorServer.html?is-external=true"; title="class or interface in javax.management.remote">JMXConnectorServer JMX_CS +private static http://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXConnectorServer.html?is-external=true"; title="class or interface in javax.management.remote">JMXConnectorServer JMX_CS workaround for HBASE-11146 master and regionserver are in 1 JVM in standalone mode only 1 JMX instance is allowed, otherwise there is port conflict even if @@ -377,7 +377,7 @@ implements rmiRegistry -private http://docs.oracle.com/javase/8/docs/api/java/rmi/registry/Registry.html?is-external=true"; title="class or interface in java.rmi.registry">Registry rmiRegistry +private http://docs.oracle.com/javase/8/docs/api/java/rmi/registry/Registry.html?is-external=true"; title="class or interface in java.rmi.registry">Registry rmiRegistry @@ -394,7 +394,7 @@ implements JMXListener -public JMXListener() +public JMXListener() @@ -411,7 +411,7 @@ implements buildJMXServiceURL -public static http://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXServiceURL.html?is-external=true"; title="class or interface 
in javax.management.remote">JMXServiceURL buildJMXServiceURL(int rmiRegistryPort, +public static http://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXServiceURL.html?is-external=true"; title="class or interface in javax.management.remote">JMXServiceURL buildJMXServiceURL(int rmiRegistryPort, int rmiConnectorPort) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException @@ -426,7 +426,7 @@ implements startConnectorServer -public void startConnectorServer(int rmiRegistryPort, +public void startConnectorServer(int rmiRegistryPort, int rmiConnectorPort) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException @@ -441,7 +441,7 @@ implements stopConnectorServer -public void stopConnectorServer() +public void stopConnectorServer() throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">IOException Throws: @
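[Editor's note] The JMXListener coprocessor above publishes an RMI connector on the two configured ports. A hedged client-side sketch using only the JDK's JMX API; the URL shape and ports are assumptions, so check buildJMXServiceURL for the exact form the coprocessor registers.

    import javax.management.MBeanServerConnection;
    import javax.management.remote.JMXConnector;
    import javax.management.remote.JMXConnectorFactory;
    import javax.management.remote.JMXServiceURL;

    public class JmxClientExample {
      public static void main(String[] args) throws Exception {
        // Assumed URL shape: RMI connector + registry on the ports the coprocessor was started with.
        JMXServiceURL url = new JMXServiceURL(
            "service:jmx:rmi://localhost:10102/jndi/rmi://localhost:10101/jmxrmi");
        JMXConnector connector = JMXConnectorFactory.connect(url);
        try {
          MBeanServerConnection mbsc = connector.getMBeanServerConnection();
          System.out.println("MBeans visible: " + mbsc.getMBeanCount());
        } finally {
          connector.close();
        }
      }
    }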
[13/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/KeyValue.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.html b/devapidocs/org/apache/hadoop/hbase/KeyValue.html index d8cfb05..628bd79 100644 --- a/devapidocs/org/apache/hadoop/hbase/KeyValue.html +++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.html @@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class KeyValue +public class KeyValue extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements ExtendedCell An HBase Key/Value. This is the fundamental HBase Type. @@ -286,7 +286,7 @@ implements length -private static org.apache.commons.logging.Log +private static org.slf4j.Logger LOG @@ -1167,7 +1167,7 @@ implements EMPTY_ARRAY_LIST -private static final http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true"; title="class or interface in java.util">ArrayListEMPTY_ARRAY_LIST +private static final http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true"; title="class or interface in java.util">ArrayList EMPTY_ARRAY_LIST @@ -1176,7 +1176,7 @@ implements LOG -private static final org.apache.commons.logging.Log LOG +private static final org.slf4j.Logger LOG @@ -1185,7 +1185,7 @@ implements FIXED_OVERHEAD -public static final int FIXED_OVERHEAD +public static final int FIXED_OVERHEAD @@ -1194,7 +1194,7 @@ implements COLUMN_FAMILY_DELIMITER -public static final char COLUMN_FAMILY_DELIMITER +public static final char COLUMN_FAMILY_DELIMITER Colon character in UTF-8 See Also: @@ -1208,7 +1208,7 @@ implements COLUMN_FAMILY_DELIM_ARRAY -public static final byte[] COLUMN_FAMILY_DELIM_ARRAY +public static final byte[] COLUMN_FAMILY_DELIM_ARRAY @@ -1218,7 +1218,7 @@ implements COMPARATOR http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true"; title="class or interface in java.lang">@Deprecated -public static final KeyValue.KVComparator COMPARATOR +public static final KeyValue.KVComparator COMPARATOR Deprecated. Use CellComparator.getInstance() instead. Deprecated for hbase 2.0, remove for hbase 3.0. Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion of KeyValue only. @@ -1231,7 +1231,7 @@ public static final META_COMPARATOR http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true"; title="class or interface in java.lang">@Deprecated -public static final KeyValue.KVComparator META_COMPARATOR +public static final KeyValue.KVComparator META_COMPARATOR Deprecated. Use CellComparatorImpl.META_COMPARATOR instead. Deprecated for hbase 2.0, remove for hbase 3.0. A KeyValue.KVComparator for hbase:meta catalog table KeyValues. 
@@ -1243,7 +1243,7 @@ public static final KEY_LENGTH_SIZE -public static final int KEY_LENGTH_SIZE +public static final int KEY_LENGTH_SIZE Size of the key length field in bytes See Also: @@ -1257,7 +1257,7 @@ public static final TYPE_SIZE -public static final int TYPE_SIZE +public static final int TYPE_SIZE Size of the key type field in bytes See Also: @@ -1271,7 +1271,7 @@ public static final ROW_LENGTH_SIZE -public static final int ROW_LENGTH_SIZE +public static final int ROW_LENGTH_SIZE Size of the row length field in bytes See Also: @@ -1285,7 +1285,7 @@ public static final FAMILY_LENGTH_SIZE -public static final int FAMILY_LENGTH_SIZE +public static final int FAMILY_LENGTH_SIZE Size of the family length field in bytes See Also: @@ -1299,7 +1299,7 @@ public static final TIMESTAMP_SIZE -public static final int TIMESTAMP_SIZE +public static final int TIMESTAMP_SIZE Size of the timestamp field in bytes See Also: @@ -1313,7 +1313,7 @@ public static final TIMESTAMP_TYPE_SIZE -public static final int TIMESTAMP_TYPE_SIZE +public static final int TIMESTAMP_TYPE_SIZE See Also: Constant Field Values @@ -1326,7 +1326,7 @@ public static final KEY_INFRASTRUCTURE_SIZE -public static final int KEY_INFRASTRUCTURE_SIZE +public static final int KEY_INFRASTRUCTURE_SIZE See Also: Constant Field Values @@ -1339,7 +1339,7 @@ public static final ROW_OFFSET -public static final int ROW_OFFSET +public static final int ROW_OFFSET See Also: Constant Field Values @@ -1352,7 +1352,7 @@ public static final ROW_KEY_OFFSET -public static final int ROW_KEY_OFFSET +public static final int ROW_KEY_OFFSET See Also: Constant Field Values @@ -1365,7 +1365,7 @@ public static final KEYVALUE_INFRASTRUCTURE_SIZE -public static final int KEYVALUE_INFRASTRUCTURE_SIZE +public static final int KEYVALUE_INFRASTRUCTURE_SIZE See Also: Constant Field Values @@ -1378,7 +1378,7 @@ public static final TAGS_L
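[Editor's note] The deprecation notes above point callers at CellComparator instead of the old KeyValue comparators. A hedged migration sketch, assuming the CellComparator.getInstance() factory named in the javadoc and standard Cell comparison semantics:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellComparator;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ComparatorMigrationExample {
      public static void main(String[] args) {
        List<Cell> cells = new ArrayList<>();
        cells.add(new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf"),
            Bytes.toBytes("q"), Bytes.toBytes("v2")));
        cells.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
            Bytes.toBytes("q"), Bytes.toBytes("v1")));

        // Old (deprecated): cells.sort(KeyValue.COMPARATOR);
        // New: the Cell-based comparator singleton.
        cells.sort(CellComparator.getInstance());

        System.out.println(cells.size() + " cells sorted by row/family/qualifier/timestamp");
      }
    }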
[03/51] [partial] hbase-site git commit: Published site at .
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.FlushWorker.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.FlushWorker.html b/devapidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.FlushWorker.html index a631eea..569ac21 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.FlushWorker.html +++ b/devapidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.FlushWorker.html @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab"; -static class HTableMultiplexer.FlushWorker +static class HTableMultiplexer.FlushWorker extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true"; title="class or interface in java.lang">Runnable @@ -317,7 +317,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. addr -private final HRegionLocation addr +private final HRegionLocation addr @@ -326,7 +326,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. queue -private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/LinkedBlockingQueue.html?is-external=true"; title="class or interface in java.util.concurrent">LinkedBlockingQueuequeue +private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/LinkedBlockingQueue.html?is-external=true"; title="class or interface in java.util.concurrent">LinkedBlockingQueue queue @@ -335,7 +335,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. multiplexer -private final HTableMultiplexer multiplexer +private final HTableMultiplexer multiplexer @@ -344,7 +344,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. totalFailedPutCount -private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true"; title="class or interface in java.util.concurrent.atomic">AtomicLong totalFailedPutCount +private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true"; title="class or interface in java.util.concurrent.atomic">AtomicLong totalFailedPutCount @@ -353,7 +353,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. currentProcessingCount -private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true"; title="class or interface in java.util.concurrent.atomic">AtomicInteger currentProcessingCount +private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true"; title="class or interface in java.util.concurrent.atomic">AtomicInteger currentProcessingCount @@ -362,7 +362,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. averageLatency -private final HTableMultiplexer.AtomicAverageCounter averageLatency +private final HTableMultiplexer.AtomicAverageCounter averageLatency @@ -371,7 +371,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. 
maxLatency -private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true"; title="class or interface in java.util.concurrent.atomic">AtomicLong maxLatency +private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true"; title="class or interface in java.util.concurrent.atomic">AtomicLong maxLatency @@ -380,7 +380,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. ap -private final AsyncProcess ap +private final AsyncProcess ap @@ -389,7 +389,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. processingList -private final http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List processingList +private final http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true"; title="class or interface in java.util">List processingList @@ -398,7 +398,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. executor -private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ScheduledExecutorService.html?is-external=true"; title="class or interface in java.util.concurrent">ScheduledExecutorService executor +private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ScheduledExecutorService.html?is-external=true"; title="class or interface in java.util.concurrent"
[06/51] [partial] hbase-site git commit: Published site at .
Regenerated devapidocs pages (blob 505bbb2e) in this partial site publish pick up the commons-logging to slf4j switch; in each of the pages below the LOG field changes from org.apache.commons.logging.Log to org.slf4j.Logger:
devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html (index 88d66e7..3ac4b13)
devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html (index 559ab52..d89cf57)
devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html (index 7b88fa4..b6707c1)
devapidocs/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html (index 1caae5c..ace49de)
devapidocs/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html (index 3e28de9..0de81bc)
The diff for devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupCopyJob.BackupDistCp.html (index 9db6332..2488141) is truncated here.
[2/7] hbase git commit: HBASE-19520 Add UTs for the new lock type PEER
HBASE-19520 Add UTs for the new lock type PEER Signed-off-by: zhangduo Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6ff20cb7 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6ff20cb7 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6ff20cb7 Branch: refs/heads/HBASE-19397 Commit: 6ff20cb72e7f8e5d02fb44ab6e5f89f84e5811b7 Parents: b22db10 Author: Guanghao Zhang Authored: Wed Dec 20 16:43:38 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:47:57 2017 +0800 -- .../procedure/MasterProcedureScheduler.java | 9 +- .../procedure/TestMasterProcedureScheduler.java | 65 - ...TestMasterProcedureSchedulerConcurrency.java | 135 +++ 3 files changed, 201 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/6ff20cb7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java index dd85f5c..5f4665c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java @@ -389,6 +389,13 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler { while (tableIter.hasNext()) { count += tableIter.next().size(); } + +// Peer queues +final AvlTreeIterator peerIter = new AvlTreeIterator<>(peerMap); +while (peerIter.hasNext()) { + count += peerIter.next().size(); +} + return count; } @@ -1041,7 +1048,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler { * @see #wakePeerExclusiveLock(Procedure, String) * @param procedure the procedure trying to acquire the lock * @param peerId peer to lock - * @return true if the procedure has to wait for the per to be available + * @return true if the procedure has to wait for the peer to be available */ public boolean waitPeerExclusiveLock(Procedure procedure, String peerId) { schedLock(); http://git-wip-us.apache.org/repos/asf/hbase/blob/6ff20cb7/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java index 0291165..fd77e1f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java @@ -905,6 +905,27 @@ public class TestMasterProcedureScheduler { } } + public static class TestPeerProcedure extends TestProcedure implements PeerProcedureInterface { +private final String peerId; +private final PeerOperationType opType; + +public TestPeerProcedure(long procId, String peerId, PeerOperationType opType) { + super(procId); + this.peerId = peerId; + this.opType = opType; +} + +@Override +public String getPeerId() { + return peerId; +} + +@Override +public PeerOperationType getPeerOperationType() { + return opType; +} + } + private static LockProcedure createLockProcedure(LockType lockType, long procId) throws Exception { LockProcedure procedure = new LockProcedure(); @@ -927,22 +948,19 @@ public class 
TestMasterProcedureScheduler { return createLockProcedure(LockType.SHARED, procId); } - private static void assertLockResource(LockedResource resource, - LockedResourceType resourceType, String resourceName) - { + private static void assertLockResource(LockedResource resource, LockedResourceType resourceType, + String resourceName) { assertEquals(resourceType, resource.getResourceType()); assertEquals(resourceName, resource.getResourceName()); } - private static void assertExclusiveLock(LockedResource resource, Procedure procedure) - { + private static void assertExclusiveLock(LockedResource resource, Procedure procedure) { assertEquals(LockType.EXCLUSIVE, resource.getLockType()); assertEquals(procedure, resource.getExclusiveLockOwnerProcedure()); assertEquals(0, resource.getSharedLockCount()); } - private static void assertSharedLock(LockedResource resource, int lockCount) - { + p
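A minimal sketch of how the new PEER lock type can be exercised against the scheduler, assuming a MasterProcedureScheduler instance named queue built the same way as elsewhere in this test class; the PeerOperationType constants (ADD, REMOVE) are assumed here, not taken from the hunks above:

  // First procedure takes the exclusive peer lock; waitPeerExclusiveLock returns
  // true only when the caller has to wait, so a free lock yields false.
  TestPeerProcedure addPeer = new TestPeerProcedure(1, "peer_1", PeerOperationType.ADD);
  assertFalse(queue.waitPeerExclusiveLock(addPeer, "peer_1"));

  // A second procedure against the same peer must wait until the first releases the lock.
  TestPeerProcedure removePeer = new TestPeerProcedure(2, "peer_1", PeerOperationType.REMOVE);
  assertTrue(queue.waitPeerExclusiveLock(removePeer, "peer_1"));

  // Releasing the exclusive lock wakes the queued procedure.
  queue.wakePeerExclusiveLock(addPeer, "peer_1");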
[4/7] hbase git commit: HBASE-19564 Procedure id is missing in the response of peer related operations
HBASE-19564 Procedure id is missing in the response of peer related operations Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b22db10f Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b22db10f Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b22db10f Branch: refs/heads/HBASE-19397 Commit: b22db10fff0ca81b89e398894f42d8b688e01bc7 Parents: d84d12f Author: zhangduo Authored: Wed Dec 20 20:57:37 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:47:57 2017 +0800 -- .../hadoop/hbase/master/MasterRpcServices.java | 24 ++-- .../master/replication/ModifyPeerProcedure.java | 4 +--- 2 files changed, 13 insertions(+), 15 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/b22db10f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index 5c088b8..788d041 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -1891,10 +1891,10 @@ public class MasterRpcServices extends RSRpcServices public AddReplicationPeerResponse addReplicationPeer(RpcController controller, AddReplicationPeerRequest request) throws ServiceException { try { - master.addReplicationPeer(request.getPeerId(), -ReplicationPeerConfigUtil.convert(request.getPeerConfig()), request.getPeerState() -.getState().equals(ReplicationState.State.ENABLED)); - return AddReplicationPeerResponse.newBuilder().build(); + long procId = master.addReplicationPeer(request.getPeerId(), +ReplicationPeerConfigUtil.convert(request.getPeerConfig()), + request.getPeerState().getState().equals(ReplicationState.State.ENABLED)); + return AddReplicationPeerResponse.newBuilder().setProcId(procId).build(); } catch (ReplicationException | IOException e) { throw new ServiceException(e); } @@ -1904,8 +1904,8 @@ public class MasterRpcServices extends RSRpcServices public RemoveReplicationPeerResponse removeReplicationPeer(RpcController controller, RemoveReplicationPeerRequest request) throws ServiceException { try { - master.removeReplicationPeer(request.getPeerId()); - return RemoveReplicationPeerResponse.newBuilder().build(); + long procId = master.removeReplicationPeer(request.getPeerId()); + return RemoveReplicationPeerResponse.newBuilder().setProcId(procId).build(); } catch (ReplicationException | IOException e) { throw new ServiceException(e); } @@ -1915,8 +1915,8 @@ public class MasterRpcServices extends RSRpcServices public EnableReplicationPeerResponse enableReplicationPeer(RpcController controller, EnableReplicationPeerRequest request) throws ServiceException { try { - master.enableReplicationPeer(request.getPeerId()); - return EnableReplicationPeerResponse.newBuilder().build(); + long procId = master.enableReplicationPeer(request.getPeerId()); + return EnableReplicationPeerResponse.newBuilder().setProcId(procId).build(); } catch (ReplicationException | IOException e) { throw new ServiceException(e); } @@ -1926,8 +1926,8 @@ public class MasterRpcServices extends RSRpcServices public DisableReplicationPeerResponse disableReplicationPeer(RpcController controller, DisableReplicationPeerRequest request) throws ServiceException { try { - master.disableReplicationPeer(request.getPeerId()); - return 
DisableReplicationPeerResponse.newBuilder().build(); + long procId = master.disableReplicationPeer(request.getPeerId()); + return DisableReplicationPeerResponse.newBuilder().setProcId(procId).build(); } catch (ReplicationException | IOException e) { throw new ServiceException(e); } @@ -1953,9 +1953,9 @@ public class MasterRpcServices extends RSRpcServices public UpdateReplicationPeerConfigResponse updateReplicationPeerConfig(RpcController controller, UpdateReplicationPeerConfigRequest request) throws ServiceException { try { - master.updateReplicationPeerConfig(request.getPeerId(), + long procId = master.updateReplicationPeerConfig(request.getPeerId(), ReplicationPeerConfigUtil.convert(request.getPeerConfig())); - return UpdateReplicationPeerConfigResponse.newBuilder().build(); + return UpdateReplicationPeerConfigResponse.newBuilder().setProcId(procId).build(); } catch (ReplicationException | IOException e) { throw new Se
[5/7] hbase git commit: HBASE-19524 Master side changes for moving peer modification from zk watcher to procedure
HBASE-19524 Master side changes for moving peer modification from zk watcher to procedure Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/827977d4 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/827977d4 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/827977d4 Branch: refs/heads/HBASE-19397 Commit: 827977d43d2d2a5d4868dd256c5c2902b7a3343f Parents: 6583456 Author: zhangduo Authored: Mon Dec 18 15:22:36 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:47:57 2017 +0800 -- .../procedure2/RemoteProcedureDispatcher.java | 3 +- .../src/main/protobuf/MasterProcedure.proto | 21 +++- .../src/main/protobuf/RegionServerStatus.proto | 3 +- .../src/main/protobuf/Replication.proto | 5 + .../replication/ReplicationPeersZKImpl.java | 4 +- .../org/apache/hadoop/hbase/master/HMaster.java | 113 --- .../hadoop/hbase/master/MasterRpcServices.java | 4 +- .../hadoop/hbase/master/MasterServices.java | 26 +++-- .../assignment/RegionTransitionProcedure.java | 11 +- .../master/procedure/MasterProcedureEnv.java| 5 + .../master/procedure/ProcedurePrepareLatch.java | 2 +- .../master/replication/AddPeerProcedure.java| 97 .../replication/DisablePeerProcedure.java | 70 .../master/replication/EnablePeerProcedure.java | 69 +++ .../master/replication/ModifyPeerProcedure.java | 97 +--- .../master/replication/RefreshPeerCallable.java | 67 --- .../replication/RefreshPeerProcedure.java | 28 +++-- .../master/replication/RemovePeerProcedure.java | 69 +++ .../master/replication/ReplicationManager.java | 76 ++--- .../replication/UpdatePeerConfigProcedure.java | 92 +++ .../hbase/regionserver/HRegionServer.java | 6 +- .../regionserver/RefreshPeerCallable.java | 70 .../hbase/master/MockNoopMasterServices.java| 23 ++-- .../replication/DummyModifyPeerProcedure.java | 13 ++- 24 files changed, 742 insertions(+), 232 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/827977d4/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java -- diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java index e9a6906..1235b33 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java @@ -247,9 +247,8 @@ public abstract class RemoteProcedureDispatcherhttp://git-wip-us.apache.org/repos/asf/hbase/blob/827977d4/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto -- diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto index 0e2bdba..ae676ea 100644 --- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto +++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto @@ -27,6 +27,7 @@ option optimize_for = SPEED; import "HBase.proto"; import "RPC.proto"; import "Snapshot.proto"; +import "Replication.proto"; // // WARNING - Compatibility rules @@ -367,9 +368,10 @@ message GCMergedRegionsStateData { } enum PeerModificationState { - UPDATE_PEER_STORAGE = 1; - REFRESH_PEER_ON_RS = 2; - POST_PEER_MODIFICATION = 3; + PRE_PEER_MODIFICATION = 1; + UPDATE_PEER_STORAGE = 2; + REFRESH_PEER_ON_RS = 3; + POST_PEER_MODIFICATION = 4; } message PeerModificationStateData { @@ -394,4 +396,17 @@ message RefreshPeerParameter { 
required string peer_id = 1; required PeerModificationType type = 2; required ServerName target_server = 3; +} + +message ModifyPeerStateData { + required string peer_id = 1; +} + +message AddPeerStateData { + required ReplicationPeer peer_config = 1; + required bool enabled = 2; +} + +message UpdatePeerConfigStateData { + required ReplicationPeer peer_config = 1; } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hbase/blob/827977d4/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto -- diff --git a/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto b/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto index eb396ac..4f75941 100644 --- a/hbase-protocol-shaded/src/main/pro
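A hedged sketch (not the patch itself) of how a StateMachineProcedure can walk the reworked PeerModificationState enum above; the per-state helper method names are hypothetical placeholders for the real work:

  @Override
  protected Flow executeFromState(MasterProcedureEnv env, PeerModificationState state) {
    switch (state) {
      case PRE_PEER_MODIFICATION:
        prePeerModification(env);   // e.g. validation / coprocessor pre-hook (assumed helper)
        setNextState(PeerModificationState.UPDATE_PEER_STORAGE);
        return Flow.HAS_MORE_STATE;
      case UPDATE_PEER_STORAGE:
        updatePeerStorage(env);     // persist the new peer config (assumed helper)
        setNextState(PeerModificationState.REFRESH_PEER_ON_RS);
        return Flow.HAS_MORE_STATE;
      case REFRESH_PEER_ON_RS:
        refreshPeers(env);          // fan out RefreshPeerProcedure to each region server (assumed helper)
        setNextState(PeerModificationState.POST_PEER_MODIFICATION);
        return Flow.HAS_MORE_STATE;
      case POST_PEER_MODIFICATION:
        postPeerModification(env);  // coprocessor post-hook / notification (assumed helper)
        return Flow.NO_MORE_STATE;
      default:
        throw new UnsupportedOperationException("unhandled state=" + state);
    }
  }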
[3/7] hbase git commit: HBASE-19216 Implement a general framework to execute remote procedure on RS
HBASE-19216 Implement a general framework to execute remote procedure on RS Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6583456a Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6583456a Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6583456a Branch: refs/heads/HBASE-19397 Commit: 6583456a2a6af534e9a32a1f2e96b22f6d4c47e9 Parents: 00b0288 Author: zhangduo Authored: Fri Dec 15 21:06:44 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:47:57 2017 +0800 -- .../hbase/procedure2/LockedResourceType.java| 4 +- .../procedure2/RemoteProcedureDispatcher.java | 23 +- .../src/main/protobuf/Admin.proto | 9 +- .../src/main/protobuf/MasterProcedure.proto | 30 +++ .../src/main/protobuf/RegionServerStatus.proto | 15 ++ .../apache/hadoop/hbase/executor/EventType.java | 26 ++- .../hadoop/hbase/executor/ExecutorType.java | 3 +- .../org/apache/hadoop/hbase/master/HMaster.java | 33 ++- .../hadoop/hbase/master/MasterRpcServices.java | 13 ++ .../assignment/RegionTransitionProcedure.java | 18 +- .../procedure/MasterProcedureScheduler.java | 224 +-- .../procedure/PeerProcedureInterface.java | 34 +++ .../master/procedure/RSProcedureDispatcher.java | 90 .../master/replication/ModifyPeerProcedure.java | 127 +++ .../master/replication/RefreshPeerCallable.java | 67 ++ .../replication/RefreshPeerProcedure.java | 197 .../hbase/procedure2/RSProcedureCallable.java | 43 .../hbase/regionserver/HRegionServer.java | 69 +- .../hbase/regionserver/RSRpcServices.java | 56 +++-- .../handler/RSProcedureHandler.java | 51 + .../assignment/TestAssignmentManager.java | 20 +- .../replication/DummyModifyPeerProcedure.java | 41 .../TestDummyModifyPeerProcedure.java | 80 +++ .../security/access/TestAccessController.java | 6 +- 24 files changed, 1109 insertions(+), 170 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/6583456a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java -- diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java index c5fe62b..dc9b5d4 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -22,5 +22,5 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public enum LockedResourceType { - SERVER, NAMESPACE, TABLE, REGION + SERVER, NAMESPACE, TABLE, REGION, PEER } http://git-wip-us.apache.org/repos/asf/hbase/blob/6583456a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java -- diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java index 54f2b08..e9a6906 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java @@ -226,13 +226,30 @@ public abstract class RemoteProcedureDispatcher - * @param */ public interface RemoteProcedure { +/** + * For building the remote operation. + */ RemoteOperation remoteCallBuild(TEnv env, TRemote remote); -void remoteCallCompleted(TEnv env, TRemote remote, RemoteOperation response); + +/** + * Called when the executeProcedure call is failed. + */ void remoteCallFailed(TEnv env, TRemote remote, IOException exception); + +/** + * Called when RS tells the remote procedure is succeeded through the + * {@code reportProcedureDone} method. + */ +void remoteOperationCompleted(TEnv env); + +/** + * Called when RS tells the remote procedure is failed through the {@code reportProcedureDone} + * method. + * @param error the error message + */ +void remoteOperationFailed(TEnv env, String error); } /** http://git-wip-us.apache.org/repos/asf/hbase/blob/6583456a/hbase-protocol-shaded/src/main/protobuf/Admin.proto -
[1/7] hbase git commit: HBASE-10092 Addendum fix TestAssignProcedure and TestWALEntrySinkFilter [Forced Update!]
Repository: hbase Updated Branches: refs/heads/HBASE-19397 540a8a261 -> 049904fef (forced update) HBASE-10092 Addendum fix TestAssignProcedure and TestWALEntrySinkFilter Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/00b0288c Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/00b0288c Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/00b0288c Branch: refs/heads/HBASE-19397 Commit: 00b0288c96853b8d7879024b2143592e3c33da3a Parents: 9a07e07 Author: zhangduo Authored: Thu Dec 21 21:43:00 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:43:17 2017 +0800 -- .../master/snapshot/TestAssignProcedure.java| 16 - .../regionserver/TestWALEntrySinkFilter.java| 35 ++-- 2 files changed, 26 insertions(+), 25 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/00b0288c/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java index e8a3081..ed53c3a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java @@ -18,18 +18,19 @@ */ package org.apache.hadoop.hbase.master.snapshot; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -51,14 +52,13 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; import org.mockito.Mockito; - -import static junit.framework.TestCase.assertFalse; -import static junit.framework.TestCase.assertTrue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, SmallTests.class}) public class TestAssignProcedure { - private static final Log LOG = LogFactory.getLog(TestAssignProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAssignProcedure.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()). 
@@ -209,7 +209,7 @@ public class TestAssignProcedure { assertTrue(procedures.get(7).getRegionInfo().equals(user3)); } catch (Throwable t) { for (AssignProcedure proc : procedures) { - LOG.debug(proc); + LOG.debug(Objects.toString(proc)); } throw t; } http://git-wip-us.apache.org/repos/asf/hbase/blob/00b0288c/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java index 0761178..941fa13f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -17,8 +17,18 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + import org.apache.
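For context on the LOG.debug change above (illustration only, not part of the patch): commons-logging declares debug(Object), so LOG.debug(proc) compiled, whereas slf4j's Logger only accepts a String message, optionally with arguments or a Throwable. Hence the explicit, null-safe stringification:

  LOG.debug(Objects.toString(proc));   // what the patch does
  LOG.debug("{}", proc);               // equivalent parameterized form slf4j also allows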
[7/7] hbase git commit: HBASE-19580 Use slf4j instead of commons-logging
HBASE-19580 Use slf4j instead of commons-logging Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/049904fe Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/049904fe Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/049904fe Branch: refs/heads/HBASE-19397 Commit: 049904fefc21284e3ee2c8307dfe6baf323a1d5f Parents: 6ff20cb Author: zhangduo Authored: Thu Dec 21 21:59:46 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 22:00:29 2017 +0800 -- .../hadoop/hbase/master/replication/AddPeerProcedure.java | 6 +++--- .../hadoop/hbase/master/replication/DisablePeerProcedure.java | 6 +++--- .../hadoop/hbase/master/replication/EnablePeerProcedure.java | 6 +++--- .../hadoop/hbase/master/replication/ModifyPeerProcedure.java | 6 +++--- .../hadoop/hbase/master/replication/RefreshPeerProcedure.java | 6 +++--- .../hadoop/hbase/master/replication/RemovePeerProcedure.java | 6 +++--- .../hbase/master/replication/UpdatePeerConfigProcedure.java| 6 +++--- 7 files changed, 21 insertions(+), 21 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/049904fe/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java index c3862d8..066c3e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.replication; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; @@ -28,6 +26,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddPeerStateData; @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.A @InterfaceAudience.Private public class AddPeerProcedure extends ModifyPeerProcedure { - private static final Log LOG = LogFactory.getLog(AddPeerProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(AddPeerProcedure.class); private ReplicationPeerConfig peerConfig; http://git-wip-us.apache.org/repos/asf/hbase/blob/049904fe/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java index 0b32db9..9a28de6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.master.replication; import 
java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The procedure for disabling a replication peer. @@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class DisablePeerProcedure extends ModifyPeerProcedure { - private static final Log LOG = LogFactory.getLog(DisablePeerProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(DisablePeerProcedure.class); public DisablePeerProcedure() { } http://git-wip-us.apache.org/repos/asf/hbase/blob/049904fe/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/EnablePeerProcedure.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/En
[6/7] hbase git commit: HBASE-19536 Client side changes for moving peer modification from zk watcher to procedure
HBASE-19536 Client side changes for moving peer modification from zk watcher to procedure Signed-off-by: zhangduo Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d84d12f9 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d84d12f9 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d84d12f9 Branch: refs/heads/HBASE-19397 Commit: d84d12f9f855593833c05a72a7dc438780a63792 Parents: 827977d Author: Guanghao Zhang Authored: Tue Dec 19 15:50:57 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:47:57 2017 +0800 -- .../org/apache/hadoop/hbase/client/Admin.java | 87 ++- .../apache/hadoop/hbase/client/HBaseAdmin.java | 149 ++- .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 82 +- .../replication/TestReplicationAdmin.java | 2 +- 4 files changed, 239 insertions(+), 81 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/d84d12f9/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java index 0567e8e..fe5eeb6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java @@ -2464,7 +2464,7 @@ public interface Admin extends Abortable, Closeable { /** * Add a new replication peer for replicating data to slave cluster. * @param peerId a short name that identifies the peer - * @param peerConfig configuration for the replication slave cluster + * @param peerConfig configuration for the replication peer * @throws IOException if a remote or network exception occurs */ default void addReplicationPeer(String peerId, ReplicationPeerConfig peerConfig) @@ -2475,7 +2475,7 @@ public interface Admin extends Abortable, Closeable { /** * Add a new replication peer for replicating data to slave cluster. * @param peerId a short name that identifies the peer - * @param peerConfig configuration for the replication slave cluster + * @param peerConfig configuration for the replication peer * @param enabled peer state, true if ENABLED and false if DISABLED * @throws IOException if a remote or network exception occurs */ @@ -2483,6 +2483,37 @@ public interface Admin extends Abortable, Closeable { throws IOException; /** + * Add a new replication peer but does not block and wait for it. + * + * You can use Future.get(long, TimeUnit) to wait on the operation to complete. It may throw + * ExecutionException if there was an error while executing the operation or TimeoutException in + * case the wait timeout was not long enough to allow the operation to complete. + * @param peerId a short name that identifies the peer + * @param peerConfig configuration for the replication peer + * @return the result of the async operation + * @throws IOException IOException if a remote or network exception occurs + */ + default Future addReplicationPeerAsync(String peerId, ReplicationPeerConfig peerConfig) + throws IOException { +return addReplicationPeerAsync(peerId, peerConfig, true); + } + + /** + * Add a new replication peer but does not block and wait for it. + * + * You can use Future.get(long, TimeUnit) to wait on the operation to complete. It may throw + * ExecutionException if there was an error while executing the operation or TimeoutException in + * case the wait timeout was not long enough to allow the operation to complete. 
+ * @param peerId a short name that identifies the peer + * @param peerConfig configuration for the replication peer + * @param enabled peer state, true if ENABLED and false if DISABLED + * @return the result of the async operation + * @throws IOException IOException if a remote or network exception occurs + */ + Future addReplicationPeerAsync(String peerId, ReplicationPeerConfig peerConfig, + boolean enabled) throws IOException; + + /** * Remove a peer and stop the replication. * @param peerId a short name that identifies the peer * @throws IOException if a remote or network exception occurs @@ -2490,6 +2521,18 @@ public interface Admin extends Abortable, Closeable { void removeReplicationPeer(String peerId) throws IOException; /** + * Remove a replication peer but does not block and wait for it. + * + * You can use Future.get(long, TimeUnit) to wait on the operation to complete. It may throw + * ExecutionException if there was an error while executing the operation or TimeoutException in + * case the wait timeout was not long enough to allow the o
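A hedged usage sketch for the new async peer API documented above; the peer id, cluster key and timeout are illustrative values, not from the patch:

  ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
  peerConfig.setClusterKey("zk1.example.com,zk2.example.com,zk3.example.com:2181:/hbase");
  // Returns as soon as the master has submitted the procedure.
  Future<Void> future = admin.addReplicationPeerAsync("peer_1", peerConfig, true);
  // Block for at most a minute for the procedure to complete, per the javadoc above.
  future.get(60, TimeUnit.SECONDS);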
hbase git commit: HBASE-10092 Addendum fix TestAssignProcedure and TestWALEntrySinkFilter
Repository: hbase Updated Branches: refs/heads/branch-2 69ea6928b -> a82f89bd3 HBASE-10092 Addendum fix TestAssignProcedure and TestWALEntrySinkFilter Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a82f89bd Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a82f89bd Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a82f89bd Branch: refs/heads/branch-2 Commit: a82f89bd3cc8dd20078ac08b449c7f394106c843 Parents: 69ea692 Author: zhangduo Authored: Thu Dec 21 21:43:00 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:45:09 2017 +0800 -- .../master/snapshot/TestAssignProcedure.java| 16 - .../regionserver/TestWALEntrySinkFilter.java| 35 ++-- 2 files changed, 26 insertions(+), 25 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/a82f89bd/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java index e8a3081..ed53c3a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java @@ -18,18 +18,19 @@ */ package org.apache.hadoop.hbase.master.snapshot; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -51,14 +52,13 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; import org.mockito.Mockito; - -import static junit.framework.TestCase.assertFalse; -import static junit.framework.TestCase.assertTrue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, SmallTests.class}) public class TestAssignProcedure { - private static final Log LOG = LogFactory.getLog(TestAssignProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAssignProcedure.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()). 
@@ -209,7 +209,7 @@ public class TestAssignProcedure { assertTrue(procedures.get(7).getRegionInfo().equals(user3)); } catch (Throwable t) { for (AssignProcedure proc : procedures) { - LOG.debug(proc); + LOG.debug(Objects.toString(proc)); } throw t; } http://git-wip-us.apache.org/repos/asf/hbase/blob/a82f89bd/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java index 0761178..941fa13f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -17,8 +17,18 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + import org.apache.hadoop.conf.Configurat
hbase git commit: HBASE-10092 Addendum fix TestAssignProcedure and TestWALEntrySinkFilter
Repository: hbase Updated Branches: refs/heads/master 9a07e07e5 -> 00b0288c9 HBASE-10092 Addendum fix TestAssignProcedure and TestWALEntrySinkFilter Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/00b0288c Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/00b0288c Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/00b0288c Branch: refs/heads/master Commit: 00b0288c96853b8d7879024b2143592e3c33da3a Parents: 9a07e07 Author: zhangduo Authored: Thu Dec 21 21:43:00 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:43:17 2017 +0800 -- .../master/snapshot/TestAssignProcedure.java| 16 - .../regionserver/TestWALEntrySinkFilter.java| 35 ++-- 2 files changed, 26 insertions(+), 25 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/00b0288c/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java index e8a3081..ed53c3a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestAssignProcedure.java @@ -18,18 +18,19 @@ */ package org.apache.hadoop.hbase.master.snapshot; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -51,14 +52,13 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; import org.mockito.Mockito; - -import static junit.framework.TestCase.assertFalse; -import static junit.framework.TestCase.assertTrue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, SmallTests.class}) public class TestAssignProcedure { - private static final Log LOG = LogFactory.getLog(TestAssignProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAssignProcedure.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()). 
@@ -209,7 +209,7 @@ public class TestAssignProcedure { assertTrue(procedures.get(7).getRegionInfo().equals(user3)); } catch (Throwable t) { for (AssignProcedure proc : procedures) { - LOG.debug(proc); + LOG.debug(Objects.toString(proc)); } throw t; } http://git-wip-us.apache.org/repos/asf/hbase/blob/00b0288c/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java index 0761178..941fa13f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -17,8 +17,18 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + import org.apache.hadoop.conf.Configuration;
[16/42] hbase git commit: HBASE-10092 Move up on to log4j2
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java index e1ae654..e07d2aa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java @@ -28,8 +28,6 @@ import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -49,6 +47,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests that {@link CacheConfig} does as expected. @@ -59,7 +59,7 @@ import org.junit.experimental.categories.Category; // tests clash on the global variable if this test is run as small sized test. @Category({IOTests.class, LargeTests.class}) public class TestCacheConfig { - private static final Log LOG = LogFactory.getLog(TestCacheConfig.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCacheConfig.class); private Configuration conf; static class Deserializer implements CacheableDeserializer { http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index 9535a46..74a310d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -34,8 +34,6 @@ import java.util.List; import java.util.Map; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -71,7 +69,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -82,7 +81,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({IOTests.class, MediumTests.class}) public class TestCacheOnWrite { - private static final Log LOG = LogFactory.getLog(TestCacheOnWrite.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCacheOnWrite.class); private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); private Configuration conf; http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java index 5111e36..d48c5f3 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java @@ -34,8 +34,6 @@ import java.util.Arrays; import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -52,10 +50,12 @@ import org.apache.hadoop.hbase.util.ChecksumType; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({IOTests.class, SmallTests.class}) public class TestChecksum { - private static final Log LOG = LogFactory.getLog(TestHFileBlock.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileBlock.class); static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { NONE, GZ }; http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.j
[18/42] hbase git commit: HBASE-10092 Move up on to log4j2
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java index 6c8f27a..520294a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java @@ -24,8 +24,6 @@ import javax.management.MBeanServerConnection; import javax.management.remote.JMXConnector; import javax.management.remote.JMXConnectorFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -39,12 +37,14 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, MediumTests.class}) public class TestJMXListener { - private static final Log LOG = LogFactory.getLog(TestJMXListener.class); + private static final Logger LOG = LoggerFactory.getLogger(TestJMXListener.class); private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static int connectorPort = 61120; http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java index 15c0b0c..14057de 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java @@ -34,8 +34,6 @@ import java.io.IOException; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; @@ -66,7 +64,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -75,7 +74,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MiscTests.class, MediumTests.class}) @SuppressWarnings("deprecation") public class TestMetaTableAccessor { - private static final Log LOG = LogFactory.getLog(TestMetaTableAccessor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableAccessor.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static Connection connection; private Random random = new Random(); http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java index 961677b..7049a74 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java @@ -27,8 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.NavigableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; import org.apache.hadoop.hbase.client.RegionInfo; @@ -49,7 +47,8 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; */ @Category({MiscTests.class, MediumTests.class}) public class TestMetaTableAccessorNoCluster { - private static final Log LOG = LogFactory.getLog(TestMetaTableAccessorNoCluster.class); + privat
[34/42] hbase git commit: HBASE-10092 Move up on to log4j2
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index 0774945..4f14192 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -34,8 +34,6 @@ import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.client.Connection; @@ -71,7 +69,8 @@ import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -142,8 +141,8 @@ public class MetaTableAccessor { * separated by "," */ - private static final Log LOG = LogFactory.getLog(MetaTableAccessor.class); - private static final Log METALOG = LogFactory.getLog("org.apache.hadoop.hbase.META"); + private static final Logger LOG = LoggerFactory.getLogger(MetaTableAccessor.class); + private static final Logger METALOG = LoggerFactory.getLogger("org.apache.hadoop.hbase.META"); // Save its daughter/parent region(s) when split/merge private static final byte[] daughterNameCq = Bytes.toBytes("_DAUGHTER_"); http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java index 52eb821..7249435 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java @@ -44,14 +44,14 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.MultiResponse.RegionResult; import org.apache.hadoop.hbase.client.RetriesExhaustedException.ThrowableWithExtraContext; import org.apache.hadoop.hbase.ipc.HBaseRpcController; @@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.Private class AsyncBatchRpcRetryingCaller { - private static final Log LOG = LogFactory.getLog(AsyncBatchRpcRetryingCaller.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncBatchRpcRetryingCaller.class); private final HashedWheelTimer retryTimer; 
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java index f9f9659..c1d84ec 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java @@ -34,13 +34,13 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import
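One detail worth noting in the MetaTableAccessor hunk above: slf4j, like commons-logging, supports name-based loggers as well as class-based ones, so the dedicated META category survives the switch and can still be tuned independently in the logging configuration. Restated for clarity:

  private static final Logger LOG = LoggerFactory.getLogger(MetaTableAccessor.class);
  private static final Logger METALOG = LoggerFactory.getLogger("org.apache.hadoop.hbase.META");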
[17/42] hbase git commit: HBASE-10092 Move up on to log4j2
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index 22d0e8e..ba4a8c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -30,8 +30,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -66,10 +64,12 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MediumTests.class, ClientTests.class}) public class TestReplicaWithCluster { - private static final Log LOG = LogFactory.getLog(TestReplicaWithCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicaWithCluster.class); private static final int NB_SERVERS = 3; private static final byte[] row = TestReplicaWithCluster.class.getName().getBytes(); @@ -115,7 +115,7 @@ public class TestReplicaWithCluster { } } } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } } else { LOG.info("We're not the primary replicas."); @@ -555,7 +555,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { -LOG.error(e1); +LOG.error(e1.toString(), e1); } // But if we ask for stale we will get it @@ -590,7 +590,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { -LOG.error(e1); +LOG.error(e1.toString(), e1); } // But if we ask for stale we will get it @@ -636,7 +636,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { -LOG.error(e1); +LOG.error(e1.toString(), e1); } try { @@ -768,7 +768,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { -LOG.error(e1); +LOG.error(e1.toString(), e1); } // Simulating the RS down http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java index a34b651..a06055d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java @@ -35,9 +35,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import com.codahale.metrics.Counter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -62,7 +60,6 @@ import 
org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.log4j.Level; import org.apache.zookeeper.KeeperException; import org.junit.After; import org.junit.AfterClass; @@ -71,6 +68,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for region replicas. Sad that we cannot isolate these without bringing up a whole @@ -79,11 +78,7 @@ import org.junit.experimental.categories.Category; @Category({MediumTests.class, ClientTests.class}) @SuppressWarnings("deprecation") public class TestReplicasClient { - private static final Log LOG = LogFactory.getLog(TestReplicasClient.class); - - static { -((Log4JLogger)RpcRe
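The edit repeated across these test classes is mechanical: swap the commons-logging Log/LogFactory pair for slf4j's Logger/LoggerFactory, and give the one-argument LOG.error(e1) calls an explicit message, since slf4j's error(String, Throwable) signature requires one. A minimal sketch of the before/after pattern follows; the class name and sleep body are illustrative only, not taken from the patch.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SomeReplicaTest {  // hypothetical class, for illustration only
      // Before: private static final Log LOG = LogFactory.getLog(SomeReplicaTest.class);
      private static final Logger LOG = LoggerFactory.getLogger(SomeReplicaTest.class);

      void waitABit() {
        try {
          Thread.sleep(1000L);
        } catch (InterruptedException e) {
          // commons-logging accepted LOG.error(e); slf4j wants a message plus the
          // throwable, hence the LOG.error(e1.toString(), e1) form seen in the diff.
          LOG.error(e.toString(), e);
        }
      }
    }

Passing the throwable as the last argument also keeps the stack trace, which the old one-argument form only printed because commons-logging special-cased Throwable parameters.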
[40/42] hbase git commit: HBASE-19524 Master side changes for moving peer modification from zk watcher to procedure
HBASE-19524 Master side changes for moving peer modification from zk watcher to procedure Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9620501b Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9620501b Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9620501b Branch: refs/heads/HBASE-19397 Commit: 9620501be61734593821bcf062f21ecd38b6ec05 Parents: 8e4af07 Author: zhangduo Authored: Mon Dec 18 15:22:36 2017 +0800 Committer: zhangduo Committed: Thu Dec 21 21:18:13 2017 +0800 -- .../procedure2/RemoteProcedureDispatcher.java | 3 +- .../src/main/protobuf/MasterProcedure.proto | 21 +++- .../src/main/protobuf/RegionServerStatus.proto | 3 +- .../src/main/protobuf/Replication.proto | 5 + .../replication/ReplicationPeersZKImpl.java | 4 +- .../org/apache/hadoop/hbase/master/HMaster.java | 113 --- .../hadoop/hbase/master/MasterRpcServices.java | 4 +- .../hadoop/hbase/master/MasterServices.java | 26 +++-- .../assignment/RegionTransitionProcedure.java | 11 +- .../master/procedure/MasterProcedureEnv.java| 5 + .../master/procedure/ProcedurePrepareLatch.java | 2 +- .../master/replication/AddPeerProcedure.java| 97 .../replication/DisablePeerProcedure.java | 70 .../master/replication/EnablePeerProcedure.java | 69 +++ .../master/replication/ModifyPeerProcedure.java | 97 +--- .../master/replication/RefreshPeerCallable.java | 67 --- .../replication/RefreshPeerProcedure.java | 28 +++-- .../master/replication/RemovePeerProcedure.java | 69 +++ .../master/replication/ReplicationManager.java | 76 ++--- .../replication/UpdatePeerConfigProcedure.java | 92 +++ .../hbase/regionserver/HRegionServer.java | 6 +- .../regionserver/RefreshPeerCallable.java | 70 .../hbase/master/MockNoopMasterServices.java| 23 ++-- .../replication/DummyModifyPeerProcedure.java | 13 ++- 24 files changed, 742 insertions(+), 232 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/9620501b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java -- diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java index e9a6906..1235b33 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java @@ -247,9 +247,8 @@ public abstract class RemoteProcedureDispatcherhttp://git-wip-us.apache.org/repos/asf/hbase/blob/9620501b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto -- diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto index 0e2bdba..ae676ea 100644 --- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto +++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto @@ -27,6 +27,7 @@ option optimize_for = SPEED; import "HBase.proto"; import "RPC.proto"; import "Snapshot.proto"; +import "Replication.proto"; // // WARNING - Compatibility rules @@ -367,9 +368,10 @@ message GCMergedRegionsStateData { } enum PeerModificationState { - UPDATE_PEER_STORAGE = 1; - REFRESH_PEER_ON_RS = 2; - POST_PEER_MODIFICATION = 3; + PRE_PEER_MODIFICATION = 1; + UPDATE_PEER_STORAGE = 2; + REFRESH_PEER_ON_RS = 3; + POST_PEER_MODIFICATION = 4; } message PeerModificationStateData { @@ -394,4 +396,17 @@ message RefreshPeerParameter { 
required string peer_id = 1; required PeerModificationType type = 2; required ServerName target_server = 3; +} + +message ModifyPeerStateData { + required string peer_id = 1; +} + +message AddPeerStateData { + required ReplicationPeer peer_config = 1; + required bool enabled = 2; +} + +message UpdatePeerConfigStateData { + required ReplicationPeer peer_config = 1; } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hbase/blob/9620501b/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto -- diff --git a/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto b/hbase-protocol-shaded/src/main/protobuf/RegionServerStatus.proto index eb396ac..4f75941 100644 --- a/hbase-protocol-shaded/src/main/pro
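The proto change extends the peer-modification state machine with a PRE_PEER_MODIFICATION step ahead of the storage update, and adds per-procedure state-data messages (ModifyPeerStateData, AddPeerStateData, UpdatePeerConfigStateData) so the new master-side procedures can persist their progress. The enum below is an illustrative Java sketch of the state ordering only; it is not the generated protobuf enum or the real ModifyPeerProcedure, and the step comments are informed guesses about each phase.

    // Illustrative only: mirrors the four states in MasterProcedure.proto.
    enum PeerModificationState {
      PRE_PEER_MODIFICATION, UPDATE_PEER_STORAGE, REFRESH_PEER_ON_RS, POST_PEER_MODIFICATION;

      PeerModificationState next() {
        switch (this) {
          case PRE_PEER_MODIFICATION: return UPDATE_PEER_STORAGE;    // pre-checks on the master
          case UPDATE_PEER_STORAGE:   return REFRESH_PEER_ON_RS;     // persist the peer change
          case REFRESH_PEER_ON_RS:    return POST_PEER_MODIFICATION; // fan out refresh to region servers
          default:                    return null;                    // terminal state
        }
      }
    }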
[25/42] hbase git commit: HBASE-10092 Move up on to log4j2
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 262dfa2..e31db82 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -39,6 +39,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -50,8 +51,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ClusterStatus; @@ -91,6 +90,7 @@ import org.apache.hadoop.hbase.http.InfoServer; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure; @@ -188,7 +188,8 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.webapp.WebAppContext; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @@ -221,7 +222,7 @@ import com.google.protobuf.Service; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) @SuppressWarnings("deprecation") public class HMaster extends HRegionServer implements MasterServices { - private static final Log LOG = LogFactory.getLog(HMaster.class.getName()); + private static Logger LOG = LoggerFactory.getLogger(HMaster.class.getName()); /** * Protection against zombie master. 
Started once Master accepts active responsibility and @@ -607,6 +608,7 @@ public class HMaster extends HRegionServer implements MasterServices { return connector.getLocalPort(); } + @Override protected Function getMetaTableObserver() { return builder -> builder.setRegionReplication(conf.getInt(HConstants.META_REPLICAS_NUM, HConstants.DEFAULT_META_REPLICA_NUM)); } @@ -818,7 +820,7 @@ public class HMaster extends HRegionServer implements MasterServices { // Wait for region servers to report in String statusStr = "Wait for region servers to report in"; status.setStatus(statusStr); -LOG.info(status); +LOG.info(Objects.toString(status)); waitForRegionServers(status); if (this.balancer instanceof FavoredNodesPromoter) { @@ -1528,6 +1530,7 @@ public class HMaster extends HRegionServer implements MasterServices { /** * @return Client info for use as prefix on an audit log string; who did an action */ + @Override public String getClientIdAuditPrefix() { return "Client=" + RpcServer.getRequestUserName().orElse(null) + "/" + RpcServer.getRemoteAddress().orElse(null); @@ -2017,7 +2020,7 @@ public class HMaster extends HRegionServer implements MasterServices { } } catch (Throwable t) { status.setStatus("Failed to become active: " + t.getMessage()); - LOG.fatal("Failed to become active master", t); + LOG.error(HBaseMarkers.FATAL, "Failed to become active master", t); // HBASE-5680: Likely hadoop23 vs hadoop 20.x/1.x incompatibility if (t instanceof NoClassDefFoundError && t.getMessage() @@ -2606,13 +2609,13 @@ public class HMaster extends HRegionServer implements MasterServices { } if (cpHost != null) { // HBASE-4014: dump a list of loaded coprocessors. - LOG.fatal("Master server abort: loaded coprocessors are: " + + LOG.error(HBaseMarkers.FATAL, "Master server abort: loaded coprocessors are: " + getLoadedCoprocessors()); } if (t != null) { - LOG.fatal(msg, t); + LOG.error(HBaseMarkers.FATAL, msg, t); } else { - LOG.fatal(msg); + LOG.error(HBaseMarkers.FATAL, msg); } try { http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterComm
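Because slf4j has no FATAL level, the former LOG.fatal(...) calls in HMaster are rewritten as LOG.error(...) tagged with an HBaseMarkers.FATAL marker, and non-String arguments such as LOG.info(status) are wrapped in Objects.toString(...) since slf4j methods take a String message. The sketch below shows that pattern with plain slf4j; HBaseMarkers itself comes from the patch, and the MarkerFactory call here is only the generic way to obtain a marker, which may not match how HBaseMarkers constructs it.

    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    public class FatalLoggingSketch {  // illustrative class name
      private static final Logger LOG = LoggerFactory.getLogger(FatalLoggingSketch.class);
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

      void abort(String msg, Throwable t, Object status) {
        // log4j: LOG.fatal(msg, t)  ->  slf4j: error(...) carrying a FATAL marker
        LOG.error(FATAL, msg, t);
        // log4j accepted LOG.info(status) for any Object; slf4j expects a String
        LOG.info(Objects.toString(status));
      }
    }

The marker survives into the log output, so appender configuration can still treat FATAL-tagged events specially even though the underlying level is ERROR.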