hbase git commit: HBASE-18412 [Shell] Support unset of list of configuration for a table (Yun Zhao)

2017-07-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 8606cda35 -> b81fed7f8


HBASE-18412 [Shell] Support unset of list of configuration for a table (Yun 
Zhao)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b81fed7f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b81fed7f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b81fed7f

Branch: refs/heads/branch-2
Commit: b81fed7f885bc6396251707f8ad07023c7df0c8a
Parents: 8606cda
Author: tedyu 
Authored: Thu Jul 20 21:01:48 2017 -0700
Committer: tedyu 
Committed: Thu Jul 20 21:01:48 2017 -0700

--
 hbase-shell/src/main/ruby/hbase/admin.rb| 17 +++
 .../src/main/ruby/shell/commands/alter.rb   |  4 +++
 hbase-shell/src/test/ruby/hbase/admin_test.rb   | 30 
 3 files changed, 51 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b81fed7f/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 5eee26c..460ede3 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -657,6 +657,23 @@ module Hbase
   htd.remove(name)
 end
 hasTableUpdate = true
+  # Unset table configuration
+  elsif method == 'table_conf_unset'
+raise(ArgumentError, 'NAME parameter missing for table_conf_unset 
method') unless name
+if name.is_a?(Array)
+  name.each do |key|
+if htd.getConfigurationValue(key).nil?
+  raise ArgumentError, "Could not find configuration: #{key}"
+end
+htd.removeConfiguration(key)
+  end
+else
+  if htd.getConfigurationValue(name).nil?
+raise ArgumentError, "Could not find configuration: #{name}"
+  end
+  htd.removeConfiguration(name)
+end
+hasTableUpdate = true
   # Unknown method
   else
 raise ArgumentError, "Unknown method: #{method}"

http://git-wip-us.apache.org/repos/asf/hbase/blob/b81fed7f/hbase-shell/src/main/ruby/shell/commands/alter.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/alter.rb 
b/hbase-shell/src/main/ruby/shell/commands/alter.rb
index 2207111..4aef28c 100644
--- a/hbase-shell/src/main/ruby/shell/commands/alter.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/alter.rb
@@ -71,6 +71,10 @@ You can also set configuration settings specific to this 
table or column family:
   hbase> alter 't1', CONFIGURATION => 
{'hbase.hregion.scan.loadColumnFamiliesOnDemand' => 'true'}
   hbase> alter 't1', {NAME => 'f2', CONFIGURATION => 
{'hbase.hstore.blockingStoreFiles' => '10'}}
 
+You can also unset configuration settings specific to this table:
+
+  hbase> alter 't1', METHOD => 'table_conf_unset', NAME => 
'hbase.hregion.majorcompaction'
+
 You can also remove a table-scope attribute:
 
   hbase> alter 't1', METHOD => 'table_att_unset', NAME => 'MAX_FILESIZE'

http://git-wip-us.apache.org/repos/asf/hbase/blob/b81fed7f/hbase-shell/src/test/ruby/hbase/admin_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/admin_test.rb 
b/hbase-shell/src/test/ruby/hbase/admin_test.rb
index 2a20d34..025b737 100644
--- a/hbase-shell/src/test/ruby/hbase/admin_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/admin_test.rb
@@ -480,6 +480,36 @@ module Hbase
   assert_no_match(eval("/" + key_2 + "/"), admin.describe(@test_name))
 end
 
+define_test "alter should be able to remove a table configuration" do
+  drop_test_table(@test_name)
+  create_test_table(@test_name)
+
+  key = "TestConf"
+  command(:alter, @test_name, CONFIGURATION => {key => 1})
+
+  # eval() is used to convert a string to regex
+  assert_match(eval("/" + key + "/"), admin.describe(@test_name))
+
+  command(:alter, @test_name, 'METHOD' => 'table_conf_unset', 'NAME' => 
key)
+  assert_no_match(eval("/" + key + "/"), admin.describe(@test_name))
+end
+
+define_test "alter should be able to remove a list of table configuration" 
do
+  drop_test_table(@test_name)
+
+  key_1 = "TestConf1"
+  key_2 = "TestConf2"
+  command(:create, @test_name, { NAME => 'i'}, CONFIGURATION => { key_1 => 
1, key_2 => 2 })
+
+  # eval() is used to convert a string to regex
+  assert_match(eval("/" + key_1 + "/"), admin.describe(@test_name))
+  assert_match(eval("/" + key_2 + "/"), admin.describe(@test_name))
+
+

hbase git commit: HBASE-18412 [Shell] Support unset of list of configuration for a table (Yun Zhao)

2017-07-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master bdc94b1d6 -> af534acab


HBASE-18412 [Shell] Support unset of list of configuration for a table (Yun 
Zhao)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/af534aca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/af534aca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/af534aca

Branch: refs/heads/master
Commit: af534acabb3e0d03e804ff409a09f3b77e17e779
Parents: bdc94b1
Author: tedyu 
Authored: Thu Jul 20 21:01:05 2017 -0700
Committer: tedyu 
Committed: Thu Jul 20 21:01:05 2017 -0700

--
 hbase-shell/src/main/ruby/hbase/admin.rb| 17 +++
 .../src/main/ruby/shell/commands/alter.rb   |  4 +++
 hbase-shell/src/test/ruby/hbase/admin_test.rb   | 30 
 3 files changed, 51 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/af534aca/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 5eee26c..460ede3 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -657,6 +657,23 @@ module Hbase
   htd.remove(name)
 end
 hasTableUpdate = true
+  # Unset table configuration
+  elsif method == 'table_conf_unset'
+raise(ArgumentError, 'NAME parameter missing for table_conf_unset 
method') unless name
+if name.is_a?(Array)
+  name.each do |key|
+if htd.getConfigurationValue(key).nil?
+  raise ArgumentError, "Could not find configuration: #{key}"
+end
+htd.removeConfiguration(key)
+  end
+else
+  if htd.getConfigurationValue(name).nil?
+raise ArgumentError, "Could not find configuration: #{name}"
+  end
+  htd.removeConfiguration(name)
+end
+hasTableUpdate = true
   # Unknown method
   else
 raise ArgumentError, "Unknown method: #{method}"

http://git-wip-us.apache.org/repos/asf/hbase/blob/af534aca/hbase-shell/src/main/ruby/shell/commands/alter.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/alter.rb 
b/hbase-shell/src/main/ruby/shell/commands/alter.rb
index 2207111..4aef28c 100644
--- a/hbase-shell/src/main/ruby/shell/commands/alter.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/alter.rb
@@ -71,6 +71,10 @@ You can also set configuration settings specific to this 
table or column family:
   hbase> alter 't1', CONFIGURATION => 
{'hbase.hregion.scan.loadColumnFamiliesOnDemand' => 'true'}
   hbase> alter 't1', {NAME => 'f2', CONFIGURATION => 
{'hbase.hstore.blockingStoreFiles' => '10'}}
 
+You can also unset configuration settings specific to this table:
+
+  hbase> alter 't1', METHOD => 'table_conf_unset', NAME => 
'hbase.hregion.majorcompaction'
+
 You can also remove a table-scope attribute:
 
   hbase> alter 't1', METHOD => 'table_att_unset', NAME => 'MAX_FILESIZE'

http://git-wip-us.apache.org/repos/asf/hbase/blob/af534aca/hbase-shell/src/test/ruby/hbase/admin_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/admin_test.rb 
b/hbase-shell/src/test/ruby/hbase/admin_test.rb
index 2a20d34..025b737 100644
--- a/hbase-shell/src/test/ruby/hbase/admin_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/admin_test.rb
@@ -480,6 +480,36 @@ module Hbase
   assert_no_match(eval("/" + key_2 + "/"), admin.describe(@test_name))
 end
 
+define_test "alter should be able to remove a table configuration" do
+  drop_test_table(@test_name)
+  create_test_table(@test_name)
+
+  key = "TestConf"
+  command(:alter, @test_name, CONFIGURATION => {key => 1})
+
+  # eval() is used to convert a string to regex
+  assert_match(eval("/" + key + "/"), admin.describe(@test_name))
+
+  command(:alter, @test_name, 'METHOD' => 'table_conf_unset', 'NAME' => 
key)
+  assert_no_match(eval("/" + key + "/"), admin.describe(@test_name))
+end
+
+define_test "alter should be able to remove a list of table configuration" 
do
+  drop_test_table(@test_name)
+
+  key_1 = "TestConf1"
+  key_2 = "TestConf2"
+  command(:create, @test_name, { NAME => 'i'}, CONFIGURATION => { key_1 => 
1, key_2 => 2 })
+
+  # eval() is used to convert a string to regex
+  assert_match(eval("/" + key_1 + "/"), admin.describe(@test_name))
+  assert_match(eval("/" + key_2 + "/"), admin.describe(@test_name))
+
+  

[17/23] hbase git commit: HBASE-16133 RSGroupBasedLoadBalancer.retainAssignment() might miss a region

2017-07-20 Thread apurtell
HBASE-16133 RSGroupBasedLoadBalancer.retainAssignment() might miss a region


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/221c17b9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/221c17b9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/221c17b9

Branch: refs/heads/HBASE-15631-branch-1
Commit: 221c17b9c567825d04450ec93a955aa958af23fa
Parents: ca0f3c8
Author: Andrew Purtell 
Authored: Wed Jul 5 15:43:46 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java | 7 ---
 1 file changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/221c17b9/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index f69f093..c1b3c7d 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -216,9 +216,10 @@ public class RSGroupBasedLoadBalancer implements 
RSGroupableBalancer, LoadBalanc
 List candidateList = filterOfflineServers(info, servers);
 ServerName server = this.internalBalancer.randomAssignment(region,
 candidateList);
-if (server != null && !assignments.containsKey(server)) {
-  assignments.put(server, new ArrayList());
-} else if (server != null) {
+if (server != null) {
+  if (!assignments.containsKey(server)) {
+assignments.put(server, new ArrayList());
+  }
   assignments.get(server).add(region);
 } else {
   //if not server is available assign to bogus so it ends up in RIT



[19/23] hbase git commit: HBASE-17496 RSGroup shell commands:get_server_rsgroup don't work and commands display an incorrect result size (Guangxu Cheng)

2017-07-20 Thread apurtell
HBASE-17496 RSGroup shell commands:get_server_rsgroup don't work and commands 
display an incorrect result size (Guangxu Cheng)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/348d7e9b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/348d7e9b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/348d7e9b

Branch: refs/heads/HBASE-15631-branch-1
Commit: 348d7e9b49e7d14fe5a76c556ee2cce5461adf5a
Parents: a557056
Author: Andrew Purtell 
Authored: Wed Jul 5 18:23:19 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 hbase-shell/src/main/ruby/shell/commands/get_rsgroup.rb| 5 ++---
 hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb | 5 ++---
 hbase-shell/src/main/ruby/shell/commands/get_table_rsgroup.rb  | 3 +--
 hbase-shell/src/main/ruby/shell/commands/list_rsgroups.rb  | 3 +--
 4 files changed, 6 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/348d7e9b/hbase-shell/src/main/ruby/shell/commands/get_rsgroup.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/get_rsgroup.rb 
b/hbase-shell/src/main/ruby/shell/commands/get_rsgroup.rb
index 122020a..a5b41af 100644
--- a/hbase-shell/src/main/ruby/shell/commands/get_rsgroup.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/get_rsgroup.rb
@@ -33,12 +33,11 @@ EOF
   end
 
   def command(group_name)
-now = Time.now
-formatter.header(['RSGROUP '.concat(group_name)])
+formatter.header(['GROUP INFORMATION'])
 rsgroup_admin.get_rsgroup(group_name) do |s|
   formatter.row([s])
 end
-formatter.footer(now)
+formatter.footer()
   end
 end
   end

http://git-wip-us.apache.org/repos/asf/hbase/blob/348d7e9b/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb 
b/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
index dddf080..fd2ccc7 100644
--- a/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
@@ -33,10 +33,9 @@ EOF
   end
 
   def command(server)
-now = Time.now
-group_name = rsgroup_admin.getRSGroupOfServer(server).getName
+group_name = rsgroup_admin.get_rsgroup_of_server(server).getName
 formatter.row([group_name])
-formatter.footer(now, 1)
+formatter.footer(1)
   end
 end
   end

http://git-wip-us.apache.org/repos/asf/hbase/blob/348d7e9b/hbase-shell/src/main/ruby/shell/commands/get_table_rsgroup.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/get_table_rsgroup.rb 
b/hbase-shell/src/main/ruby/shell/commands/get_table_rsgroup.rb
index 6939c12..9684687 100644
--- a/hbase-shell/src/main/ruby/shell/commands/get_table_rsgroup.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/get_table_rsgroup.rb
@@ -33,11 +33,10 @@ EOF
   end
 
   def command(table)
-now = Time.now
 group_name =
 rsgroup_admin.get_rsgroup_of_table(table).getName
 formatter.row([group_name])
-formatter.footer(now, 1)
+formatter.footer(1)
   end
 end
   end

http://git-wip-us.apache.org/repos/asf/hbase/blob/348d7e9b/hbase-shell/src/main/ruby/shell/commands/list_rsgroups.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/list_rsgroups.rb 
b/hbase-shell/src/main/ruby/shell/commands/list_rsgroups.rb
index 5ab923a..393797d 100644
--- a/hbase-shell/src/main/ruby/shell/commands/list_rsgroups.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/list_rsgroups.rb
@@ -35,7 +35,6 @@ EOF
   end
 
   def command(regex = '.*')
-now = Time.now
 formatter.header(['GROUPS'])
 
 regex = /#{regex}/ unless regex.is_a?(Regexp)
@@ -44,7 +43,7 @@ EOF
   formatter.row([group])
 end
 
-formatter.footer(now, list.size)
+formatter.footer(list.size)
   end
 end
   end



[22/23] hbase git commit: HBASE-17758 [RSGROUP] Add shell command to move servers and tables at the same time (Guangxu Cheng)

2017-07-20 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/56baade3/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index e71470e..ee30e15 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -68,6 +68,8 @@ import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupI
 import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoResponse;
 import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.ListRSGroupInfosRequest;
 import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.ListRSGroupInfosResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveServersAndTablesRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveServersAndTablesResponse;
 import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveServersRequest;
 import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveServersResponse;
 import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveTablesRequest;
@@ -206,6 +208,26 @@ public class RSGroupAdminEndpoint extends 
RSGroupAdminService
   }
 
   @Override
+  public void moveServersAndTables(RpcController controller, 
MoveServersAndTablesRequest request,
+  RpcCallback done) {
+MoveServersAndTablesResponse.Builder builder = 
MoveServersAndTablesResponse.newBuilder();
+try {
+  Set hostPorts = Sets.newHashSet();
+  for (HBaseProtos.ServerName el : request.getServersList()) {
+hostPorts.add(HostAndPort.fromParts(el.getHostName(), el.getPort()));
+  }
+  Set tables = new HashSet<>(request.getTableNameList().size());
+  for (HBaseProtos.TableName tableName : request.getTableNameList()) {
+tables.add(ProtobufUtil.toTableName(tableName));
+  }
+  groupAdminServer.moveServersAndTables(hostPorts, tables, 
request.getTargetGroup());
+} catch (IOException e) {
+  ResponseConverter.setControllerException(controller, e);
+}
+done.run(builder.build());
+  }
+
+  @Override
   public void addRSGroup(RpcController controller,
AddRSGroupRequest request,
RpcCallback done) {
@@ -953,6 +975,16 @@ public class RSGroupAdminEndpoint extends 
RSGroupAdminService
   }
 
   @Override
+  public void 
preMoveServersAndTables(ObserverContext ctx,
+  Set servers, Set tables, String targetGroup) 
throws IOException {
+  }
+
+  @Override
+  public void 
postMoveServersAndTables(ObserverContext ctx,
+  Set servers, Set tables, String targetGroup) 
throws IOException {
+  }
+
+  @Override
   public void preAddRSGroup(ObserverContext ctx,
 String name) throws IOException {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/56baade3/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 1069ac0..863b71e 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -501,6 +501,19 @@ public class RSGroupAdminServer extends RSGroupAdmin {
   }
 
   @Override
+  public void moveServersAndTables(Set servers, Set 
tables,
+  String targetGroup) throws IOException {
+if (servers == null || servers.isEmpty() ) {
+  throw new ConstraintException("The list of servers to move cannot be 
null or empty.");
+}
+if (tables == null || tables.isEmpty()) {
+  throw new ConstraintException("The list of tables to move cannot be null 
or empty.");
+}
+moveServers(servers, targetGroup);
+moveTables(tables, targetGroup);
+  }
+
+  @Override
   public void close() throws IOException {
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/56baade3/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
index 5b5563e..e11cb57 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
+++ 

[18/23] hbase git commit: HBASE-16462 TestRSGroupsBas#testGroupBalance may hang due to uneven region distribution (Guangxu Cheng)

2017-07-20 Thread apurtell
HBASE-16462 TestRSGroupsBas#testGroupBalance may hang due to uneven region 
distribution (Guangxu Cheng)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/69328e93
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/69328e93
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/69328e93

Branch: refs/heads/HBASE-15631-branch-1
Commit: 69328e93a4021ee9b42158d14802d6fdc6275cda
Parents: 4c69806
Author: Andrew Purtell 
Authored: Wed Jul 5 17:57:24 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java| 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/69328e93/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
index e5a1f6a..9baaa1a 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
@@ -79,6 +79,8 @@ public class TestRSGroups extends TestRSGroupsBase {
   @BeforeClass
   public static void setUp() throws Exception {
 TEST_UTIL = new HBaseTestingUtility();
+TEST_UTIL.getConfiguration().setFloat(
+"hbase.master.balancer.stochastic.tableSkewCost", 6000);
 TEST_UTIL.getConfiguration().set(
 HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
 RSGroupBasedLoadBalancer.class.getName());



[14/23] hbase git commit: Be robust against movement of the rsgroup table

2017-07-20 Thread apurtell
Be robust against movement of the rsgroup table


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0c309c16
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0c309c16
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0c309c16

Branch: refs/heads/HBASE-15631-branch-1
Commit: 0c309c16921934dc5e50e7b463b3a4b346874cf2
Parents: 0405043
Author: Andrew Purtell 
Authored: Wed Jul 5 15:19:32 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   | 35 +++-
 1 file changed, 19 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0c309c16/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 7fcb7c7..6c991bd 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -77,6 +77,7 @@ import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
 import org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
+import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
 import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
 import org.apache.hadoop.hbase.security.access.AccessControlLists;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -112,7 +113,6 @@ public class RSGroupInfoManagerImpl implements 
RSGroupInfoManager, ServerListene
   private volatile Map rsGroupMap;
   private volatile Map tableMap;
   private MasterServices master;
-  private Table rsGroupTable;
   private ClusterConnection conn;
   private ZooKeeperWatcher watcher;
   private RSGroupStartupWorker rsGroupStartupWorker;
@@ -281,10 +281,9 @@ public class RSGroupInfoManagerImpl implements 
RSGroupInfoManager, ServerListene
 // if online read from GROUP table
 if (forceOnline || isOnline()) {
   LOG.debug("Refreshing in Online mode.");
-  if (rsGroupTable == null) {
-rsGroupTable = conn.getTable(RSGROUP_TABLE_NAME);
+  try (Table rsGroupTable = conn.getTable(RSGROUP_TABLE_NAME)) {
+groupList.addAll(rsGroupSerDe.retrieveGroupList(rsGroupTable));
   }
-  groupList.addAll(rsGroupSerDe.retrieveGroupList(rsGroupTable));
 } else {
   LOG.debug("Refershing in Offline mode.");
   String groupBasePath = ZKUtil.joinZNode(watcher.baseZNode, rsGroupZNode);
@@ -724,28 +723,32 @@ public class RSGroupInfoManagerImpl implements 
RSGroupInfoManager, ServerListene
 
   private void multiMutate(List mutations)
   throws IOException {
-CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);
-MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder
-  = MultiRowMutationProtos.MutateRowsRequest.newBuilder();
+MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
 for (Mutation mutation : mutations) {
   if (mutation instanceof Put) {
-mmrBuilder.addMutationRequest(ProtobufUtil.toMutation(
+mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(
   ClientProtos.MutationProto.MutationType.PUT, mutation));
   } else if (mutation instanceof Delete) {
-mmrBuilder.addMutationRequest(ProtobufUtil.toMutation(
+mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(
   ClientProtos.MutationProto.MutationType.DELETE, mutation));
   } else {
 throw new DoNotRetryIOException("multiMutate doesn't support "
   + mutation.getClass().getName());
   }
 }
-
-MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =
-  MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);
-try {
-  service.mutateRows(null, mmrBuilder.build());
-} catch (ServiceException ex) {
-  ProtobufUtil.toIOException(ex);
+MutateRowsRequest mrm = mrmBuilder.build();
+// Be robust against movement of the rsgroup table
+// TODO: Why is this necessary sometimes? Should we be using our own 
connection?
+conn.clearRegionCache(RSGROUP_TABLE_NAME);
+try (Table rsGroupTable = conn.getTable(RSGROUP_TABLE_NAME)) {
+  CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);
+  

[12/23] hbase git commit: HBASE-16456 Fix findbugs warnings in hbase-rsgroup module (Guangxu Cheng)

2017-07-20 Thread apurtell
HBASE-16456 Fix findbugs warnings in hbase-rsgroup module (Guangxu Cheng)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4c69806b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4c69806b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4c69806b

Branch: refs/heads/HBASE-15631-branch-1
Commit: 4c69806b1fa6f1054f4ec634b7ef1c1183683424
Parents: 03c4dfc
Author: Andrew Purtell 
Authored: Wed Jul 5 17:19:43 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java  | 16 +++-
 .../hadoop/hbase/rsgroup/RSGroupAdminServer.java|  2 +-
 .../hbase/rsgroup/RSGroupBasedLoadBalancer.java | 16 +---
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   | 14 --
 4 files changed, 37 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4c69806b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index 8fa9fdc..e71470e 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -89,7 +89,7 @@ public class RSGroupAdminEndpoint extends RSGroupAdminService
   public void start(CoprocessorEnvironment env) throws IOException {
 MasterCoprocessorEnvironment menv = (MasterCoprocessorEnvironment)env;
 master = menv.getMasterServices();
-groupInfoManager = new RSGroupInfoManagerImpl(master);
+setGroupInfoManager(new RSGroupInfoManagerImpl(master));
 groupAdminServer = new RSGroupAdminServer(master, groupInfoManager);
 Class clazz =
 
master.getConfiguration().getClass(HConstants.HBASE_MASTER_LOADBALANCER_CLASS, 
null);
@@ -107,6 +107,20 @@ public class RSGroupAdminEndpoint extends 
RSGroupAdminService
 return this;
   }
 
+  private static void setStaticGroupInfoManager(RSGroupInfoManagerImpl 
groupInfoManager) {
+RSGroupAdminEndpoint.groupInfoManager = groupInfoManager;
+  }
+
+  private void setGroupInfoManager(RSGroupInfoManagerImpl groupInfoManager) 
throws IOException {
+if (groupInfoManager == null) {
+  groupInfoManager = new RSGroupInfoManagerImpl(master);
+  groupInfoManager.init();
+} else if (!groupInfoManager.isInit()) {
+  groupInfoManager.init();
+}
+setStaticGroupInfoManager(groupInfoManager);
+  }
+
   public RSGroupInfoManager getGroupInfoManager() {
 return groupInfoManager;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4c69806b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 43ac3ad..e76e3e7 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -218,7 +218,7 @@ public class RSGroupAdminServer extends RSGroupAdmin {
 }
   }
   try {
-Thread.sleep(1000);
+manager.wait(1000);
   } catch (InterruptedException e) {
 LOG.warn("Sleep interrupted", e);
 Thread.currentThread().interrupt();

http://git-wip-us.apache.org/repos/asf/hbase/blob/4c69806b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index c1b3c7d..519177c 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -322,18 +322,19 @@ public class RSGroupBasedLoadBalancer implements 
RSGroupableBalancer, LoadBalanc
   private Set getMisplacedRegions(
   Map regions) throws IOException {
 Set misplacedRegions = new HashSet();
-for (HRegionInfo region : regions.keySet()) {
-  ServerName assignedServer = regions.get(region);
+

[16/23] hbase git commit: HBASE-15858 Some region server group shell commands don't work

2017-07-20 Thread apurtell
HBASE-15858 Some region server group shell commands don't work


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca0f3c80
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca0f3c80
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca0f3c80

Branch: refs/heads/HBASE-15631-branch-1
Commit: ca0f3c8066f0d547fbcff2c79847a0e1c25cdba9
Parents: ae96f6f
Author: Andrew Purtell 
Authored: Wed Jul 5 15:37:47 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../src/main/ruby/shell/commands/get_server_rsgroup.rb   |  2 +-
 hbase-shell/src/test/ruby/shell/rsgroup_shell_test.rb| 11 +++
 2 files changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca0f3c80/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb 
b/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
index 322f6bb..a689a7c 100644
--- a/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/get_server_rsgroup.rb
@@ -31,7 +31,7 @@ EOF
 
   def command(server)
 now = Time.now
-group_name = rsgroup_admin.getGroupOfServer(server).getName
+group_name = rsgroup_admin.getRSGroupOfServer(server).getName
 formatter.row([group_name])
 formatter.footer(now, 1)
   end

http://git-wip-us.apache.org/repos/asf/hbase/blob/ca0f3c80/hbase-shell/src/test/ruby/shell/rsgroup_shell_test.rb
--
diff --git a/hbase-shell/src/test/ruby/shell/rsgroup_shell_test.rb 
b/hbase-shell/src/test/ruby/shell/rsgroup_shell_test.rb
index d892775..1040ed8 100644
--- a/hbase-shell/src/test/ruby/shell/rsgroup_shell_test.rb
+++ b/hbase-shell/src/test/ruby/shell/rsgroup_shell_test.rb
@@ -49,12 +49,15 @@ module Hbase
   assert_not_nil(group)
   assert_equal(0, group.getServers.count)
 
-  hostport =
-  
@rsgroup_admin.getRSGroupInfo('default').getServers.iterator.next.toString
+  hostport = 
@rsgroup_admin.getRSGroupInfo('default').getServers.iterator.next
+  @shell.command('get_rsgroup', 'default')
+  hostPortStr = hostport.toString
+  @shell.command('get_server_rsgroup', [hostPortStr])
   @shell.command('move_rsgroup_servers',
  group_name,
- [hostport])
+ [hostPortStr])
   assert_equal(1, 
@rsgroup_admin.getRSGroupInfo(group_name).getServers.count)
+  assert_equal(group_name, 
@rsgroup_admin.getRSGroupOfServer(hostport).getName)
 
   @shell.command('move_rsgroup_tables',
  group_name,
@@ -65,7 +68,7 @@ module Hbase
   @hbase.rsgroup_admin(@formatter).get_rsgroup(group_name) do |line|
 case count
 when 1
-  assert_equal(hostport, line)
+  assert_equal(hostPortStr, line)
 when 3
   assert_equal(table_name, line)
 end



[08/23] hbase git commit: HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

2017-07-20 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
new file mode 100644
index 000..00cd6b0
--- /dev/null
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -0,0 +1,955 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rsgroup;
+
+import com.google.common.collect.Sets;
+import com.google.common.net.HostAndPort;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin.MasterSwitchType;
+import org.apache.hadoop.hbase.constraint.ConstraintException;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.MasterObserver;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.master.RegionPlan;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.AddRSGroupRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.AddRSGroupResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.BalanceRSGroupRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.BalanceRSGroupResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoOfServerRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoOfServerResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoOfTableRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoOfTableResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.ListRSGroupInfosRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.ListRSGroupInfosResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveServersRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveServersResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveTablesRequest;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveTablesResponse;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RSGroupAdminService;
+import 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupRequest;
+import 

[11/23] hbase git commit: HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

2017-07-20 Thread apurtell
HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

  Applied 
https://issues.apache.org/jira/secure/attachment/12799888/HBASE-15631.02.branch-1.patch
  Amending-Author: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/52f05079
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/52f05079
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/52f05079

Branch: refs/heads/HBASE-15631-branch-1
Commit: 52f05079f28a0b2e5a2a8944f6bbda95d65b9314
Parents: 6f1cc2c
Author: Andrew Purtell 
Authored: Wed Jul 5 13:39:35 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../org/apache/hadoop/hbase/ServerName.java |19 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java |36 +-
 .../hadoop/hbase/rsgroup/RSGroupInfo.java   |   187 +
 hbase-it/pom.xml|10 +
 .../hbase/rsgroup/IntegrationTestRSGroup.java   |99 +
 hbase-protocol/pom.xml  | 2 +
 .../hbase/protobuf/generated/ClientProtos.java  | 2 +-
 .../hbase/protobuf/generated/MasterProtos.java  |30 +-
 .../protobuf/generated/RSGroupAdminProtos.java  | 11855 +
 .../hbase/protobuf/generated/RSGroupProtos.java |  1331 ++
 .../protobuf/generated/SnapshotProtos.java  |24 +-
 hbase-protocol/src/main/protobuf/RSGroup.proto  |34 +
 .../src/main/protobuf/RSGroupAdmin.proto|   136 +
 hbase-rsgroup/pom.xml   |   278 +
 .../hadoop/hbase/rsgroup/RSGroupAdmin.java  |   121 +
 .../hbase/rsgroup/RSGroupAdminClient.java   |   204 +
 .../hbase/rsgroup/RSGroupAdminEndpoint.java |   955 ++
 .../hbase/rsgroup/RSGroupAdminServer.java   |   503 +
 .../hbase/rsgroup/RSGroupBasedLoadBalancer.java |   428 +
 .../hbase/rsgroup/RSGroupInfoManager.java   |   132 +
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   |   758 ++
 .../hadoop/hbase/rsgroup/RSGroupSerDe.java  |88 +
 .../hbase/rsgroup/RSGroupableBalancer.java  |29 +
 .../balancer/TestRSGroupBasedLoadBalancer.java  |   574 +
 .../hadoop/hbase/rsgroup/TestRSGroups.java  |   287 +
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  |   643 +
 .../hbase/rsgroup/TestRSGroupsOfflineMode.java  |   187 +
 .../rsgroup/VerifyingRSGroupAdminClient.java|   149 +
 .../hbase/tmpl/master/MasterStatusTmpl.jamon| 2 +
 .../apache/hadoop/hbase/LocalHBaseCluster.java  | 3 +
 .../BaseMasterAndRegionObserver.java|53 +
 .../hbase/coprocessor/BaseMasterObserver.java   |54 +
 .../hbase/coprocessor/MasterObserver.java   |98 +
 .../hadoop/hbase/master/AssignmentManager.java  |16 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |40 +-
 .../hadoop/hbase/master/LoadBalancer.java   | 3 +
 .../hbase/master/MasterCoprocessorHost.java |   137 +
 .../hadoop/hbase/master/MasterRpcServices.java  | 8 +
 .../hadoop/hbase/master/MasterServices.java | 5 +
 .../hbase/security/access/AccessController.java |32 +
 .../hbase/coprocessor/TestMasterObserver.java   |52 +
 .../hbase/master/MockNoopMasterServices.java| 5 +
 .../master/TestAssignmentManagerOnCluster.java  |   127 +-
 .../hadoop/hbase/master/TestCatalogJanitor.java | 3 +
 .../hbase/master/TestMasterStatusServlet.java   |12 +-
 .../normalizer/TestSimpleRegionNormalizer.java  | 2 +-
 .../security/access/TestAccessController.java   |75 +
 hbase-shell/pom.xml |35 +
 hbase-shell/src/main/ruby/hbase.rb  | 1 +
 hbase-shell/src/main/ruby/hbase/hbase.rb| 4 +
 .../src/main/ruby/hbase/rsgroup_admin.rb|   150 +
 hbase-shell/src/main/ruby/shell.rb  |21 +
 hbase-shell/src/main/ruby/shell/commands.rb | 4 +
 .../src/main/ruby/shell/commands/add_rsgroup.rb |39 +
 .../main/ruby/shell/commands/balance_rsgroup.rb |37 +
 .../src/main/ruby/shell/commands/get_rsgroup.rb |44 +
 .../ruby/shell/commands/get_server_rsgroup.rb   |40 +
 .../ruby/shell/commands/get_table_rsgroup.rb|41 +
 .../main/ruby/shell/commands/list_rsgroups.rb   |50 +
 .../ruby/shell/commands/move_rsgroup_servers.rb |37 +
 .../ruby/shell/commands/move_rsgroup_tables.rb  |37 +
 .../main/ruby/shell/commands/remove_rsgroup.rb  |37 +
 .../apache/hadoop/hbase/client/TestShell.java   | 2 +-
 .../hbase/client/rsgroup/TestShellRSGroups.java |   111 +
 .../src/test/ruby/shell/rsgroup_shell_test.rb   |96 +
 hbase-shell/src/test/ruby/test_helper.rb| 4 +
 pom.xml |23 +
 67 files changed, 20585 insertions(+), 56 deletions(-)
--



[23/23] hbase git commit: HBASE-17758 [RSGROUP] Add shell command to move servers and tables at the same time (Guangxu Cheng)

2017-07-20 Thread apurtell
HBASE-17758 [RSGROUP] Add shell command to move servers and tables at the same 
time (Guangxu Cheng)

HBASE-17806 TestRSGroups#testMoveServersAndTables is flaky in master branch


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/56baade3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/56baade3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/56baade3

Branch: refs/heads/HBASE-15631-branch-1
Commit: 56baade3b01c009fce2f6df52fcd8f8fea1ca753
Parents: 22cbbf9
Author: Andrew Purtell 
Authored: Wed Jul 5 18:29:14 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../hadoop/hbase/rsgroup/RSGroupInfo.java   |3 +
 .../protobuf/generated/RSGroupAdminProtos.java  | 1759 +-
 .../src/main/protobuf/RSGroupAdmin.proto|   12 +
 .../hadoop/hbase/rsgroup/RSGroupAdmin.java  |   11 +
 .../hbase/rsgroup/RSGroupAdminClient.java   |   22 +
 .../hbase/rsgroup/RSGroupAdminEndpoint.java |   32 +
 .../hbase/rsgroup/RSGroupAdminServer.java   |   13 +
 .../hbase/rsgroup/RSGroupInfoManager.java   |   10 +
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   |   24 +
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  |  111 ++
 .../rsgroup/VerifyingRSGroupAdminClient.java|7 +
 .../BaseMasterAndRegionObserver.java|   10 +
 .../hbase/coprocessor/BaseMasterObserver.java   |   10 +
 .../hbase/coprocessor/MasterObserver.java   |   20 +-
 .../hbase/master/MasterCoprocessorHost.java |   26 +
 .../hbase/security/access/AccessController.java |6 +
 .../hbase/coprocessor/TestMasterObserver.java   |   10 +
 .../src/main/ruby/hbase/rsgroup_admin.rb|   14 +
 hbase-shell/src/main/ruby/shell.rb  |1 +
 .../commands/move_servers_tables_rsgroup.rb |   37 +
 20 files changed, 2115 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/56baade3/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
index 7297ff2..74572ac 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
@@ -150,6 +150,9 @@ public class RSGroupInfo {
 sb.append(", ");
 sb.append(" Servers:");
 sb.append(this.servers);
+sb.append(", ");
+sb.append(" Tables:");
+sb.append(this.tables);
 return sb.toString();
 
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/56baade3/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
index 3d1f4bd..ca1db1e 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
@@ -10754,6 +10754,1621 @@ public final class RSGroupAdminProtos {
 // 
@@protoc_insertion_point(class_scope:hbase.pb.GetRSGroupInfoOfServerResponse)
   }
 
+  public interface MoveServersAndTablesRequestOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required string target_group = 1;
+/**
+ * required string target_group = 1;
+ */
+boolean hasTargetGroup();
+/**
+ * required string target_group = 1;
+ */
+java.lang.String getTargetGroup();
+/**
+ * required string target_group = 1;
+ */
+com.google.protobuf.ByteString
+getTargetGroupBytes();
+
+// repeated .hbase.pb.ServerName servers = 2;
+/**
+ * repeated .hbase.pb.ServerName servers = 2;
+ */
+
java.util.List
 
+getServersList();
+/**
+ * repeated .hbase.pb.ServerName servers = 2;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName 
getServers(int index);
+/**
+ * repeated .hbase.pb.ServerName servers = 2;
+ */
+int getServersCount();
+/**
+ * repeated .hbase.pb.ServerName servers = 2;
+ */
+java.util.List 
+getServersOrBuilderList();
+/**
+ * repeated .hbase.pb.ServerName servers = 2;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder 
getServersOrBuilder(
+int index);
+
+// repeated 

[15/23] hbase git commit: HBASE-16430 Fix RegionServer Group's bug when moving multiple tables (Guangxu Cheng)

2017-07-20 Thread apurtell
HBASE-16430 Fix RegionServer Group's bug when moving multiple tables (Guangxu 
Cheng)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/03c4dfce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/03c4dfce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/03c4dfce

Branch: refs/heads/HBASE-15631-branch-1
Commit: 03c4dfce4bc7f7f47735a1eebd6d341bab8489a0
Parents: 221c17b
Author: Andrew Purtell 
Authored: Wed Jul 5 17:16:50 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   |  2 +-
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  | 54 
 2 files changed, 55 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/03c4dfce/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 6c991bd..5cb2e71 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -227,7 +227,7 @@ public class RSGroupInfoManagerImpl implements 
RSGroupInfoManager, ServerListene
 Map newGroupMap = Maps.newHashMap(rsGroupMap);
 for(TableName tableName: tableNames) {
   if (tableMap.containsKey(tableName)) {
-RSGroupInfo src = new 
RSGroupInfo(rsGroupMap.get(tableMap.get(tableName)));
+RSGroupInfo src = new 
RSGroupInfo(newGroupMap.get(tableMap.get(tableName)));
 src.removeTable(tableName);
 newGroupMap.put(src.getName(), src);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/03c4dfce/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index 9225e09..5fcdc7c 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -640,4 +640,58 @@ public abstract class TestRSGroupsBase {
   private String getGroupName(String baseName) {
 return groupPrefix+"_"+baseName+"_"+rand.nextInt(Integer.MAX_VALUE);
   }
+
+  @Test
+  public void testMultiTableMove() throws Exception {
+LOG.info("testMultiTableMove");
+
+final TableName tableNameA = TableName.valueOf(tablePrefix + 
"_testMultiTableMoveA");
+final TableName tableNameB = TableName.valueOf(tablePrefix + 
"_testMultiTableMoveB");
+final byte[] familyNameBytes = Bytes.toBytes("f");
+String newGroupName = getGroupName("testMultiTableMove");
+final RSGroupInfo newGroup = addGroup(rsGroupAdmin, newGroupName, 1);
+
+TEST_UTIL.createTable(tableNameA, familyNameBytes);
+TEST_UTIL.createTable(tableNameB, familyNameBytes);
+TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate() {
+  @Override
+  public boolean evaluate() throws Exception {
+List regionsA = getTableRegionMap().get(tableNameA);
+if (regionsA == null)
+  return false;
+List regionsB = getTableRegionMap().get(tableNameB);
+if (regionsB == null)
+  return false;
+
+return getTableRegionMap().get(tableNameA).size() >= 1
+&& getTableRegionMap().get(tableNameB).size() >= 1;
+  }
+});
+
+RSGroupInfo tableGrpA = rsGroupAdmin.getRSGroupInfoOfTable(tableNameA);
+assertTrue(tableGrpA.getName().equals(RSGroupInfo.DEFAULT_GROUP));
+
+RSGroupInfo tableGrpB = rsGroupAdmin.getRSGroupInfoOfTable(tableNameB);
+assertTrue(tableGrpB.getName().equals(RSGroupInfo.DEFAULT_GROUP));
+//change table's group
+LOG.info("Moving table [" + tableNameA + "," + tableNameB + "] to " + 
newGroup.getName());
+rsGroupAdmin.moveTables(Sets.newHashSet(tableNameA, tableNameB), 
newGroup.getName());
+
+//verify group change
+Assert.assertEquals(newGroup.getName(),
+rsGroupAdmin.getRSGroupInfoOfTable(tableNameA).getName());
+
+Assert.assertEquals(newGroup.getName(),
+rsGroupAdmin.getRSGroupInfoOfTable(tableNameB).getName());
+
+//verify tables' not exist in old group
+Set DefaultTables = 
rsGroupAdmin.getRSGroupInfo(RSGroupInfo.DEFAULT_GROUP).getTables();
+

[21/23] hbase git commit: HBASE-17350 Fixup of regionserver group-based assignment

2017-07-20 Thread apurtell
HBASE-17350 Fixup of regionserver group-based assignment


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a557056d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a557056d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a557056d

Branch: refs/heads/HBASE-15631-branch-1
Commit: a557056d260b98edba1442ccb0f71ab90665db33
Parents: 69328e9
Author: Andrew Purtell 
Authored: Wed Jul 5 18:09:48 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../hadoop/hbase/rsgroup/RSGroupInfo.java   |  19 +--
 .../apache/hadoop/hbase/util/Addressing.java|  22 +++
 .../hadoop/hbase/util/TestAddressing.java   |  39 +
 .../hbase/rsgroup/RSGroupAdminServer.java   | 159 ++-
 .../hbase/rsgroup/RSGroupInfoManager.java   |   4 +-
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   |  42 +++--
 .../apache/hadoop/hbase/rsgroup/Utility.java|  48 ++
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  |   2 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |   3 +-
 .../hadoop/hbase/master/RegionStates.java   |   2 +-
 .../hadoop/hbase/master/ServerManager.java  |   1 -
 hbase-shell/src/main/ruby/shell.rb  |   7 +-
 hbase-shell/src/main/ruby/shell/commands.rb |   1 -
 .../src/main/ruby/shell/commands/add_rsgroup.rb |   3 +-
 .../main/ruby/shell/commands/balance_rsgroup.rb |   5 +-
 .../src/main/ruby/shell/commands/get_rsgroup.rb |   5 +-
 .../ruby/shell/commands/get_server_rsgroup.rb   |   5 +-
 .../ruby/shell/commands/get_table_rsgroup.rb|   5 +-
 .../main/ruby/shell/commands/list_procedures.rb |   2 +-
 .../main/ruby/shell/commands/list_rsgroups.rb   |   3 +-
 .../ruby/shell/commands/move_rsgroup_servers.rb |  37 -
 .../ruby/shell/commands/move_rsgroup_tables.rb  |  37 -
 .../ruby/shell/commands/move_servers_rsgroup.rb |  40 +
 .../ruby/shell/commands/move_tables_rsgroup.rb  |  40 +
 .../main/ruby/shell/commands/remove_rsgroup.rb  |   3 +-
 .../src/test/ruby/shell/rsgroup_shell_test.rb   |   4 +-
 26 files changed, 341 insertions(+), 197 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a557056d/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
index 0fb02d8..7297ff2 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java
@@ -20,16 +20,19 @@
 
 package org.apache.hadoop.hbase.rsgroup;
 
-import com.google.common.collect.Sets;
-import com.google.common.net.HostAndPort;
-
 import java.util.Collection;
 import java.util.NavigableSet;
 import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Addressing;
+
+import com.google.common.collect.Sets;
+import com.google.common.net.HostAndPort;
 
 /**
  * Stores the group information of region server groups.
@@ -53,14 +56,13 @@ public class RSGroupInfo {
   Set servers,
   NavigableSet tables) {
 this.name = name;
-this.servers = servers;
-this.tables = tables;
+this.servers = new TreeSet<>(new Addressing.HostAndPortComparable());
+this.servers.addAll(servers);
+this.tables = new TreeSet<>(tables);
   }
 
   public RSGroupInfo(RSGroupInfo src) {
-name = src.getName();
-servers = Sets.newHashSet(src.getServers());
-tables = Sets.newTreeSet(src.getTables());
+this(src.getName(), src.servers, src.tables);
   }
 
   /**
@@ -183,5 +185,4 @@ public class RSGroupInfo {
 result = 31 * result + name.hashCode();
 return result;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a557056d/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java
index 31fb1f5..71f6127 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java
@@ -24,10 +24,13 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.NetworkInterface;
 import java.net.SocketException;

[20/23] hbase git commit: HBASE-15848 Fix possible null point dereference in RSGroupBasedLoadBalancer#getMisplacedRegions (Stephen Yuan Jiang)

2017-07-20 Thread apurtell
HBASE-15848 Fix possible null point dereference in 
RSGroupBasedLoadBalancer#getMisplacedRegions (Stephen Yuan Jiang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ae96f6f2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ae96f6f2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ae96f6f2

Branch: refs/heads/HBASE-15631-branch-1
Commit: ae96f6f261e38b6358e332ce15a5ccd11d7ecd2c
Parents: 0c309c1
Author: Andrew Purtell 
Authored: Wed Jul 5 15:37:10 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java| 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ae96f6f2/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index fea1275..f69f093 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -331,7 +331,7 @@ public class RSGroupBasedLoadBalancer implements 
RSGroupableBalancer, LoadBalanc
 " on server: " + assignedServer +
 " found in group: " +
 
RSGroupInfoManager.getRSGroupOfServer(assignedServer.getHostPort()) +
-" outside of group: " + info.getName());
+" outside of group: " + (info == null ? "UNKNOWN" : 
info.getName()));
 misplacedRegions.add(region);
   }
 }
@@ -352,7 +352,7 @@ public class RSGroupBasedLoadBalancer implements 
RSGroupableBalancer, LoadBalanc
 try {
   info = RSGroupInfoManager.getRSGroup(
   RSGroupInfoManager.getRSGroupOfTable(region.getTable()));
-}catch(IOException exp){
+} catch (IOException exp) {
   LOG.debug("Group information null for region of table " + 
region.getTable(),
   exp);
 }



[10/23] hbase git commit: HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

2017-07-20 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
new file mode 100644
index 000..3d1f4bd
--- /dev/null
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java
@@ -0,0 +1,11855 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: RSGroupAdmin.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class RSGroupAdminProtos {
+  private RSGroupAdminProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface ListTablesOfRSGroupRequestOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required string r_s_group_name = 1;
+/**
+ * required string r_s_group_name = 1;
+ */
+boolean hasRSGroupName();
+/**
+ * required string r_s_group_name = 1;
+ */
+java.lang.String getRSGroupName();
+/**
+ * required string r_s_group_name = 1;
+ */
+com.google.protobuf.ByteString
+getRSGroupNameBytes();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.ListTablesOfRSGroupRequest}
+   */
+  public static final class ListTablesOfRSGroupRequest extends
+  com.google.protobuf.GeneratedMessage
+  implements ListTablesOfRSGroupRequestOrBuilder {
+// Use ListTablesOfRSGroupRequest.newBuilder() to construct.
+private 
ListTablesOfRSGroupRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private ListTablesOfRSGroupRequest(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final ListTablesOfRSGroupRequest defaultInstance;
+public static ListTablesOfRSGroupRequest getDefaultInstance() {
+  return defaultInstance;
+}
+
+public ListTablesOfRSGroupRequest getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private ListTablesOfRSGroupRequest(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  rSGroupName_ = input.readBytes();
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.internal_static_hbase_pb_ListTablesOfRSGroupRequest_descriptor;
+}
+
+protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.internal_static_hbase_pb_ListTablesOfRSGroupRequest_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.ListTablesOfRSGroupRequest.class,
 
org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.ListTablesOfRSGroupRequest.Builder.class);
+}
+
+public static com.google.protobuf.Parser 
PARSER =
+new com.google.protobuf.AbstractParser() {
+  public ListTablesOfRSGroupRequest parsePartialFrom(
+  

[06/23] hbase git commit: HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

2017-07-20 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
new file mode 100644
index 000..9225e09
--- /dev/null
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -0,0 +1,643 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rsgroup;
+
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.net.HostAndPort;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.ClusterStatus;
+import org.apache.hadoop.hbase.HBaseCluster;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.RegionLoad;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.constraint.ConstraintException;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.security.SecureRandom;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public abstract class TestRSGroupsBase {
+  protected static final Log LOG = LogFactory.getLog(TestRSGroupsBase.class);
+
+  //shared
+  protected final static String groupPrefix = "Group";
+  protected final static String tablePrefix = "Group";
+  protected final static SecureRandom rand = new SecureRandom();
+
+  //shared, cluster type specific
+  protected static HBaseTestingUtility TEST_UTIL;
+  protected static HBaseAdmin admin;
+  protected static HBaseCluster cluster;
+  protected static RSGroupAdmin rsGroupAdmin;
+
+  public final static long WAIT_TIMEOUT = 6*5;
+  public final static int NUM_SLAVES_BASE = 4; //number of slaves for the 
smallest cluster
+
+
+
+  protected RSGroupInfo addGroup(RSGroupAdmin gAdmin, String groupName,
+ int serverCount) throws IOException, 
InterruptedException {
+RSGroupInfo defaultInfo = gAdmin
+.getRSGroupInfo(RSGroupInfo.DEFAULT_GROUP);
+assertTrue(defaultInfo != null);
+assertTrue(defaultInfo.getServers().size() >= serverCount);
+gAdmin.addRSGroup(groupName);
+
+Set set = new HashSet();
+for(HostAndPort server: defaultInfo.getServers()) {
+  if(set.size() == serverCount) {
+break;
+  }
+  set.add(server);
+}
+gAdmin.moveServers(set, groupName);
+RSGroupInfo result = gAdmin.getRSGroupInfo(groupName);
+assertTrue(result.getServers().size() >= serverCount);
+return result;
+  }
+
+  static void removeGroup(RSGroupAdminClient groupAdmin, String groupName) 
throws IOException {
+RSGroupInfo RSGroupInfo = groupAdmin.getRSGroupInfo(groupName);
+groupAdmin.moveTables(RSGroupInfo.getTables(), RSGroupInfo.DEFAULT_GROUP);
+groupAdmin.moveServers(RSGroupInfo.getServers(), 
RSGroupInfo.DEFAULT_GROUP);
+groupAdmin.removeRSGroup(groupName);
+  }
+
+  protected void deleteTableIfNecessary() throws IOException {
+for (HTableDescriptor desc : 
TEST_UTIL.getHBaseAdmin().listTables(tablePrefix+".*")) {
+  

[04/23] hbase git commit: HBASE-17772 IntegrationTestRSGroup won't run

2017-07-20 Thread apurtell
HBASE-17772 IntegrationTestRSGroup won't run


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/22cbbf9e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/22cbbf9e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/22cbbf9e

Branch: refs/heads/HBASE-15631-branch-1
Commit: 22cbbf9ed6d78ab70cd5224c9a9ef444f73a5401
Parents: 348d7e9
Author: Andrew Purtell 
Authored: Wed Jul 5 18:24:47 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java   | 9 -
 1 file changed, 8 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/22cbbf9e/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index 5831696..52f576d 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -150,7 +150,14 @@ public abstract class TestRSGroupsBase {
 ClusterStatus status = 
TEST_UTIL.getHBaseClusterInterface().getClusterStatus();
 for(ServerName serverName : status.getServers()) {
   for(RegionLoad rl : 
status.getLoad(serverName).getRegionsLoad().values()) {
-TableName tableName = HRegionInfo.getTable(rl.getName());
+TableName tableName = null;
+try {
+  tableName = HRegionInfo.getTable(rl.getName());
+} catch (IllegalArgumentException e) {
+  LOG.warn("Failed parse a table name from regionname=" +
+  Bytes.toStringBinary(rl.getName()));
+  continue;
+}
 if(!map.containsKey(tableName)) {
   map.put(tableName, new TreeMap());
 }



[02/23] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread apurtell
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6f1cc2c8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6f1cc2c8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6f1cc2c8

Branch: refs/heads/HBASE-15631-branch-1
Commit: 6f1cc2c89ff44a0e59d292a5f05c20ae99e4d9b8
Parents: 9036556
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:23:33 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6f1cc2c8/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."

[09/23] hbase git commit: HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

2017-07-20 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupProtos.java
new file mode 100644
index 000..979f762
--- /dev/null
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupProtos.java
@@ -0,0 +1,1331 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: RSGroup.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class RSGroupProtos {
+  private RSGroupProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface RSGroupInfoOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required string name = 1;
+/**
+ * required string name = 1;
+ */
+boolean hasName();
+/**
+ * required string name = 1;
+ */
+java.lang.String getName();
+/**
+ * required string name = 1;
+ */
+com.google.protobuf.ByteString
+getNameBytes();
+
+// repeated .hbase.pb.ServerName servers = 4;
+/**
+ * repeated .hbase.pb.ServerName servers = 4;
+ */
+
java.util.List
 
+getServersList();
+/**
+ * repeated .hbase.pb.ServerName servers = 4;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName 
getServers(int index);
+/**
+ * repeated .hbase.pb.ServerName servers = 4;
+ */
+int getServersCount();
+/**
+ * repeated .hbase.pb.ServerName servers = 4;
+ */
+java.util.List 
+getServersOrBuilderList();
+/**
+ * repeated .hbase.pb.ServerName servers = 4;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder 
getServersOrBuilder(
+int index);
+
+// repeated .hbase.pb.TableName tables = 3;
+/**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+
java.util.List
 
+getTablesList();
+/**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName 
getTables(int index);
+/**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+int getTablesCount();
+/**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+java.util.List 
+getTablesOrBuilderList();
+/**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder 
getTablesOrBuilder(
+int index);
+  }
+  /**
+   * Protobuf type {@code hbase.pb.RSGroupInfo}
+   */
+  public static final class RSGroupInfo extends
+  com.google.protobuf.GeneratedMessage
+  implements RSGroupInfoOrBuilder {
+// Use RSGroupInfo.newBuilder() to construct.
+private RSGroupInfo(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private RSGroupInfo(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final RSGroupInfo defaultInstance;
+public static RSGroupInfo getDefaultInstance() {
+  return defaultInstance;
+}
+
+public RSGroupInfo getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private RSGroupInfo(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  name_ = input.readBytes();
+  break;
+}
+case 26: {
+  if (!((mutable_bitField0_ & 0x0004) == 0x0004)) {
+tables_ = new 
java.util.ArrayList();
+mutable_bitField0_ |= 0x0004;

[07/23] hbase git commit: HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

2017-07-20 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
new file mode 100644
index 000..7fcb7c7
--- /dev/null
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -0,0 +1,758 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rsgroup;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import com.google.common.collect.Sets;
+import com.google.common.net.HostAndPort;
+import com.google.protobuf.ServiceException;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableStateManager;
+import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.constraint.ConstraintException;
+import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.master.ServerListener;
+import org.apache.hadoop.hbase.master.procedure.CreateTableProcedure;
+import org.apache.hadoop.hbase.master.procedure.ProcedurePrepareLatch;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
+import org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos;
+import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
+import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
+import org.apache.hadoop.hbase.security.access.AccessControlLists;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ModifyRegionUtils;
+import org.apache.hadoop.hbase.zookeeper.ZKUtil;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.zookeeper.KeeperException;
+
+/**
+ * This is an implementation of {@link RSGroupInfoManager}. Which makes
+ * use of an HBase table as the persistence store for the group information.
+ * It also makes use of zookeeper to store group information needed
+ * for bootstrapping during offline mode.
+ */
+public class RSGroupInfoManagerImpl implements RSGroupInfoManager, 
ServerListener {
+  private static final Log LOG = 
LogFactory.getLog(RSGroupInfoManagerImpl.class);
+
+  /** Table descriptor for hbase:rsgroup catalog table */
+  private final static HTableDescriptor RSGROUP_TABLE_DESC;
+  static {
+

[03/23] hbase git commit: HBASE-18235 LoadBalancer.BOGUS_SERVER_NAME should not have a bogus hostname

2017-07-20 Thread apurtell
HBASE-18235 LoadBalancer.BOGUS_SERVER_NAME should not have a bogus hostname


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f7c37893
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f7c37893
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f7c37893

Branch: refs/heads/HBASE-15631-branch-1
Commit: f7c378935474e6c39b1b2dfb3c9520e3ebf17d60
Parents: 56baade
Author: Andrew Purtell 
Authored: Mon Jul 3 17:54:36 2017 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 20 17:55:42 2017 -0700

--
 .../main/java/org/apache/hadoop/hbase/master/LoadBalancer.java  | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f7c37893/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
index 937b32f..a80cdc3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
@@ -52,8 +52,9 @@ import org.apache.hadoop.hbase.TableName;
 @InterfaceAudience.Private
 public interface LoadBalancer extends Configurable, Stoppable, 
ConfigurationObserver {
 
-  //used to signal to the caller that the region(s) cannot be assigned
-  ServerName BOGUS_SERVER_NAME = ServerName.parseServerName("localhost,1,1");
+  // Used to signal to the caller that the region(s) cannot be assigned
+  // We deliberately use 'localhost' so the operation will fail fast
+  ServerName BOGUS_SERVER_NAME = ServerName.valueOf("localhost,1,1");
 
   /**
* Set the current cluster status.  This allows a LoadBalancer to map host 
name to a server



[01/23] hbase git commit: HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes. [Forced Update!]

2017-07-20 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/HBASE-15631-branch-1 d0c72847a -> f7c378935 (forced update)


HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic 
when configuring hbase.bucketcache.bucket.sizes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9036556a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9036556a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9036556a

Branch: refs/heads/HBASE-15631-branch-1
Commit: 9036556a33c356225814c3ca50ecc09997269ea5
Parents: cfd5b6b
Author: anoopsamjohn 
Authored: Thu Jul 20 23:00:48 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Jul 20 23:00:48 2017 +0530

--
 hbase-common/src/main/resources/hbase-default.xml  |  2 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  | 17 +
 .../hadoop/hbase/io/hfile/TestCacheConfig.java | 14 ++
 3 files changed, 28 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index 3c4044d..a29e9c8 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -848,7 +848,7 @@ possible configurations would overwhelm and obscure the 
important.
 A comma-separated list of sizes for buckets for the 
bucketcache.
 Can be multiple sizes. List block sizes in order from smallest to largest.
 The sizes you use will depend on your data access patterns.
-Must be a multiple of 1024 else you will run into
+Must be a multiple of 256 else you will run into
 'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
 If you specify no values here, then you pick up the default bucketsizes set
 in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 340236b..57d2057 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -551,7 +551,8 @@ public class CacheConfig {
* @return Returns L2 block cache instance (for now it is BucketCache 
BlockCache all the time)
* or null if not supposed to be a L2.
*/
-  private static BlockCache getL2(final Configuration c) {
+  @VisibleForTesting
+  static BlockCache getL2(final Configuration c) {
 final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, 
EXTERNAL_BLOCKCACHE_DEFAULT);
 if (LOG.isDebugEnabled()) {
   LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " 
l2 cache");
@@ -561,10 +562,8 @@ public class CacheConfig {
 if (useExternal) {
   return getExternalBlockcache(c);
 }
-
 // otherwise use the bucket cache.
 return getBucketCache(c);
-
   }
 
   private static BlockCache getExternalBlockcache(Configuration c) {
@@ -618,7 +617,17 @@ public class CacheConfig {
 if (configuredBucketSizes != null) {
   bucketSizes = new int[configuredBucketSizes.length];
   for (int i = 0; i < configuredBucketSizes.length; i++) {
-bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
+int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());
+if (bucketSize % 256 != 0) {
+  // We need all the bucket sizes to be multiples of 256. Having all 
the configured bucket
+  // sizes to be multiples of 256 will ensure that the block offsets 
within buckets,
+  // that are calculated, will also be multiples of 256.
+  // See BucketEntry where offset to each block is represented using 5 
bytes (instead of 8
+  // bytes long). We would like to save heap overhead as less as 
possible.
+  throw new IllegalArgumentException("Illegal value: " + bucketSize + 
" configured for '"
+  + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be 
multiples of 256");
+}
+bucketSizes[i] = bucketSize;
   }
 }
 BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java

[05/23] hbase git commit: HBASE-15631 Backport Regionserver Groups (HBASE-6721) to branch-1

2017-07-20 Thread apurtell
http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 51aeff8..c409ee9 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -2925,4 +2925,79 @@ public class TestAccessController extends SecureTestUtil 
{
 verifyDenied(replicateLogEntriesAction, USER_CREATE, USER_RW, USER_RO, 
USER_NONE, USER_OWNER,
   USER_GROUP_READ, USER_GROUP_ADMIN, USER_GROUP_CREATE);
   }
+
+  @Test
+  public void testMoveServers() throws Exception {
+AccessTestAction action1 = new AccessTestAction() {
+  @Override
+  public Object run() throws Exception {
+
ACCESS_CONTROLLER.preMoveServers(ObserverContext.createAndPrepare(CP_ENV, null),
+null, null);
+return null;
+  }
+};
+
+verifyAllowed(action1, SUPERUSER, USER_ADMIN);
+verifyDenied(action1, USER_CREATE, USER_RW, USER_RO, USER_NONE, 
USER_OWNER);
+  }
+
+  @Test
+  public void testMoveTables() throws Exception {
+AccessTestAction action1 = new AccessTestAction() {
+  @Override
+  public Object run() throws Exception {
+
ACCESS_CONTROLLER.preMoveTables(ObserverContext.createAndPrepare(CP_ENV, null),
+null, null);
+return null;
+  }
+};
+
+verifyAllowed(action1, SUPERUSER, USER_ADMIN);
+verifyDenied(action1, USER_CREATE, USER_RW, USER_RO, USER_NONE, 
USER_OWNER);
+  }
+
+  @Test
+  public void testAddGroup() throws Exception {
+AccessTestAction action1 = new AccessTestAction() {
+  @Override
+  public Object run() throws Exception {
+
ACCESS_CONTROLLER.preAddRSGroup(ObserverContext.createAndPrepare(CP_ENV, null),
+null);
+return null;
+  }
+};
+
+verifyAllowed(action1, SUPERUSER, USER_ADMIN);
+verifyDenied(action1, USER_CREATE, USER_RW, USER_RO, USER_NONE, 
USER_OWNER);
+  }
+
+  @Test
+  public void testRemoveGroup() throws Exception {
+AccessTestAction action1 = new AccessTestAction() {
+  @Override
+  public Object run() throws Exception {
+
ACCESS_CONTROLLER.preRemoveRSGroup(ObserverContext.createAndPrepare(CP_ENV, 
null),
+null);
+return null;
+  }
+};
+
+verifyAllowed(action1, SUPERUSER, USER_ADMIN);
+verifyDenied(action1, USER_CREATE, USER_RW, USER_RO, USER_NONE, 
USER_OWNER);
+  }
+
+  @Test
+  public void testBalanceGroup() throws Exception {
+AccessTestAction action1 = new AccessTestAction() {
+  @Override
+  public Object run() throws Exception {
+
ACCESS_CONTROLLER.preBalanceRSGroup(ObserverContext.createAndPrepare(CP_ENV, 
null),
+null);
+return null;
+  }
+};
+
+verifyAllowed(action1, SUPERUSER, USER_ADMIN);
+verifyDenied(action1, USER_CREATE, USER_RW, USER_RO, USER_NONE, 
USER_OWNER);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-shell/pom.xml
--
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index a2a1d0c..44b6095 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -254,6 +254,41 @@
 
   
   
+
+  rsgroup
+  
+
+!skip-rsgroup
+
+  
+  
+
+  org.apache.hbase
+  hbase-rsgroup
+
+  
+  
+
+  
+org.codehaus.mojo
+build-helper-maven-plugin
+
+  
+add-test-source
+
+  add-test-source
+
+
+  
+src/test/rsgroup
+  
+
+  
+
+  
+
+  
+
 
 
   skipShellTests

http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-shell/src/main/ruby/hbase.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase.rb 
b/hbase-shell/src/main/ruby/hbase.rb
index 88a6f04..2c0aecb 100644
--- a/hbase-shell/src/main/ruby/hbase.rb
+++ b/hbase-shell/src/main/ruby/hbase.rb
@@ -112,6 +112,7 @@ require 'hbase/quotas'
 require 'hbase/replication_admin'
 require 'hbase/security'
 require 'hbase/visibility_labels'
+require 'hbase/rsgroup_admin'
 
 
 include HBaseQuotasConstants

http://git-wip-us.apache.org/repos/asf/hbase/blob/52f05079/hbase-shell/src/main/ruby/hbase/hbase.rb
--
diff --git 

[1/3] hbase git commit: HBASE-18426 nightly job should use independent stages to check supported jdks

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 [deleted] bdc94b1d6
  refs/heads/HBASE-18426 [created] 015d219c1
  refs/heads/branch-1-HBASE-18147 [deleted] c852591fc
  refs/heads/branch-1.1-HBASE-18147 [deleted] af5af7ee3
  refs/heads/branch-1.1-HBASE-18426 [created] 2edb0ef88
  refs/heads/branch-1.2-HBASE-18147 [deleted] 45573cb68
  refs/heads/branch-1.2-HBASE-18426 [created] 5f1f9dea4


HBASE-18426 nightly job should use independent stages to check supported jdks

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/015d219c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/015d219c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/015d219c

Branch: refs/heads/HBASE-18426
Commit: 015d219c1d883df13929d7b1940adc12bc1b61bc
Parents: bdc94b1
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:31:34 2017 -0500

--
 dev-support/Jenkinsfile| 259 +++-
 dev-support/hbase_nightly_yetus.sh |  86 +++
 2 files changed, 243 insertions(+), 102 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/015d219c/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 26f72d7..a11e0a4 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -20,10 +20,6 @@ pipeline {
   label 'Hadoop'
 }
   }
-  // work around for YETUS-508, requires maven outside of the dockerfile
-  tools {
-maven 'Maven (latest)'
-  }
   triggers {
 cron('@daily')
   }
@@ -38,8 +34,23 @@ pipeline {
 BASEDIR = "${env.WORKSPACE}/component"
 YETUS_RELEASE = '0.5.0'
 // where we'll write everything from different steps.
-OUTPUT_RELATIVE = 'output'
-OUTPUTDIR = "${env.WORKSPACE}/output"
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
   }
   parameters {
 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
@@ -48,24 +59,9 @@ pipeline {
 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
   }
   stages {
-stage ('yetus check') {
-  environment {
-PROJECT = 'hbase'
-PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
-// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
-AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
-WHITESPACE_IGNORE_LIST = '.*/generated/.*'
-// output from surefire; sadly the archive function in yetus only 
works on file names.
-ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
-// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
-TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
-BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
-EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'

[3/3] hbase git commit: HBASE-18426 nightly job should use independent stages to check supported jdks

2017-07-20 Thread busbey
HBASE-18426 nightly job should use independent stages to check supported jdks

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2edb0ef8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2edb0ef8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2edb0ef8

Branch: refs/heads/branch-1.1-HBASE-18426
Commit: 2edb0ef8895eeec065c79a69409f38e2c5789584
Parents: 5316580
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:34:10 2017 -0500

--
 dev-support/Jenkinsfile| 259 +++-
 dev-support/hbase_nightly_yetus.sh |  86 +++
 2 files changed, 243 insertions(+), 102 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2edb0ef8/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 26f72d7..a11e0a4 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -20,10 +20,6 @@ pipeline {
   label 'Hadoop'
 }
   }
-  // work around for YETUS-508, requires maven outside of the dockerfile
-  tools {
-maven 'Maven (latest)'
-  }
   triggers {
 cron('@daily')
   }
@@ -38,8 +34,23 @@ pipeline {
 BASEDIR = "${env.WORKSPACE}/component"
 YETUS_RELEASE = '0.5.0'
 // where we'll write everything from different steps.
-OUTPUT_RELATIVE = 'output'
-OUTPUTDIR = "${env.WORKSPACE}/output"
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
   }
   parameters {
 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
@@ -48,24 +59,9 @@ pipeline {
 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
   }
   stages {
-stage ('yetus check') {
-  environment {
-PROJECT = 'hbase'
-PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
-// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
-AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
-WHITESPACE_IGNORE_LIST = '.*/generated/.*'
-// output from surefire; sadly the archive function in yetus only 
works on file names.
-ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
-// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
-TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
-BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
-EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
-  }
+stage ('yetus install') {
   steps {
-// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
 sh  '''#!/usr/bin/env bash
-printenv
 echo "Ensure we have a copy of Apache Yetus."
 if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; 

[2/3] hbase git commit: HBASE-18426 nightly job should use independent stages to check supported jdks

2017-07-20 Thread busbey
HBASE-18426 nightly job should use independent stages to check supported jdks

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5f1f9dea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5f1f9dea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5f1f9dea

Branch: refs/heads/branch-1.2-HBASE-18426
Commit: 5f1f9dea44cbcf946c03f7b0546c78e7c76f6ce8
Parents: 933f4b3
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:34:02 2017 -0500

--
 dev-support/Jenkinsfile| 259 +++-
 dev-support/hbase_nightly_yetus.sh |  86 +++
 2 files changed, 243 insertions(+), 102 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5f1f9dea/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 26f72d7..a11e0a4 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -20,10 +20,6 @@ pipeline {
   label 'Hadoop'
 }
   }
-  // work around for YETUS-508, requires maven outside of the dockerfile
-  tools {
-maven 'Maven (latest)'
-  }
   triggers {
 cron('@daily')
   }
@@ -38,8 +34,23 @@ pipeline {
 BASEDIR = "${env.WORKSPACE}/component"
 YETUS_RELEASE = '0.5.0'
 // where we'll write everything from different steps.
-OUTPUT_RELATIVE = 'output'
-OUTPUTDIR = "${env.WORKSPACE}/output"
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
   }
   parameters {
 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
@@ -48,24 +59,9 @@ pipeline {
 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
   }
   stages {
-stage ('yetus check') {
-  environment {
-PROJECT = 'hbase'
-PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
-// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
-AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
-WHITESPACE_IGNORE_LIST = '.*/generated/.*'
-// output from surefire; sadly the archive function in yetus only 
works on file names.
-ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
-// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
-TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
-BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
-EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
-  }
+stage ('yetus install') {
   steps {
-// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
 sh  '''#!/usr/bin/env bash
-printenv
 echo "Ensure we have a copy of Apache Yetus."
 if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; 

[2/6] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6f1cc2c8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6f1cc2c8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6f1cc2c8

Branch: refs/heads/branch-1
Commit: 6f1cc2c89ff44a0e59d292a5f05c20ae99e4d9b8
Parents: 9036556
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:23:33 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6f1cc2c8/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm -rf 

[5/6] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/933f4b33
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/933f4b33
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/933f4b33

Branch: refs/heads/branch-1.2
Commit: 933f4b33dc7102b5db99aea2fb05a9c9b0f70686
Parents: 7d2175e
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:23:50 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/933f4b33/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[1/6] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 9036556a3 -> 6f1cc2c89
  refs/heads/branch-1.1 b03a5e743 -> 5316580dc
  refs/heads/branch-1.2 7d2175eb3 -> 933f4b33d
  refs/heads/branch-1.3 2b60f4ecd -> b3a2a00af
  refs/heads/branch-1.4 0940714c4 -> 8cfcd12e9
  refs/heads/master bc93b6610 -> bdc94b1d6


HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bdc94b1d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bdc94b1d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bdc94b1d

Branch: refs/heads/master
Commit: bdc94b1d6b836479b308fb0e7e4952c85b37a550
Parents: bc93b66
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:07:33 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bdc94b1d/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure 

[6/6] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5316580d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5316580d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5316580d

Branch: refs/heads/branch-1.1
Commit: 5316580dc65840ca9acef852476ba4b794571ba0
Parents: b03a5e7
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:23:55 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5316580d/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[4/6] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b3a2a00a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b3a2a00a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b3a2a00a

Branch: refs/heads/branch-1.3
Commit: b3a2a00aff1591bdec9b7c67691e1fce0ba0b4e3
Parents: 2b60f4e
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:23:45 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b3a2a00a/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[3/6] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8cfcd12e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8cfcd12e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8cfcd12e

Branch: refs/heads/branch-1.4
Commit: 8cfcd12e958bbd2fb185d4006d376cc98406a5d4
Parents: 0940714
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:23:40 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8cfcd12e/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[3/3] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile

Signed-off-by: Alex Leblang 
Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bdc94b1d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bdc94b1d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bdc94b1d

Branch: refs/heads/HBASE-18147
Commit: bdc94b1d6b836479b308fb0e7e4952c85b37a550
Parents: bc93b66
Author: Sean Busbey 
Authored: Thu Jul 20 17:07:33 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 17:07:33 2017 -0500

--
 dev-support/Jenkinsfile   | 198 +
 dev-support/docker/Dockerfile |  29 ++
 2 files changed, 227 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bdc94b1d/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..26f72d7
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,198 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+maven 'Maven (latest)'
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE = 'output'
+OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus check') {
+  environment {
+PROJECT = 'hbase'
+PROJECT_PERSONALITY = 
'https://git-wip-us.apache.org/repos/asf?p=hbase.git;a=blob_plain;f=dev-support/hbase-personality.sh;hb=refs/heads/master'
+// This section of the docs tells folks not to use the javadoc tag. 
older branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only 
works on file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove 
from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = 
"${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  steps {
+// TODO we can move the yetus install into a different stage and then use 
stash to deploy it.
+sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[1/3] hbase git commit: HBASE-17738 BucketCache startup is slow - addendum (Ram) [Forced Update!]

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 37141d5f1 -> bdc94b1d6 (forced update)


HBASE-17738 BucketCache startup is slow - addendum (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0c49185c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0c49185c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0c49185c

Branch: refs/heads/HBASE-18147
Commit: 0c49185c3e95cc91ba6455a404ca4e89f2c2fc20
Parents: 01db60d
Author: Ramkrishna 
Authored: Thu Jul 20 22:38:13 2017 +0530
Committer: Ramkrishna 
Committed: Thu Jul 20 22:38:13 2017 +0530

--
 .../hadoop/hbase/util/ByteBufferArray.java  | 17 ++---
 .../hadoop/hbase/util/TestByteBufferArray.java  | 39 
 2 files changed, 51 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0c49185c/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index 60f8c79..068afe2 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -44,14 +44,15 @@ import com.google.common.annotations.VisibleForTesting;
  * reading/writing data from this large buffer with a position and offset
  */
 @InterfaceAudience.Private
-public final class ByteBufferArray {
+public class ByteBufferArray {
   private static final Log LOG = LogFactory.getLog(ByteBufferArray.class);
 
   public static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
   @VisibleForTesting
   ByteBuffer buffers[];
   private int bufferSize;
-  private int bufferCount;
+  @VisibleForTesting
+  int bufferCount;
 
   /**
* We allocate a number of byte buffers as the capacity. In order not to out
@@ -75,12 +76,13 @@ public final class ByteBufferArray {
 createBuffers(directByteBuffer, allocator);
   }
 
-  private void createBuffers(boolean directByteBuffer, ByteBufferAllocator 
allocator)
+  @VisibleForTesting
+  void createBuffers(boolean directByteBuffer, ByteBufferAllocator allocator)
   throws IOException {
-int threadCount = Runtime.getRuntime().availableProcessors();
+int threadCount = getThreadCount();
 ExecutorService service = new ThreadPoolExecutor(threadCount, threadCount, 
0L,
 TimeUnit.MILLISECONDS, new LinkedBlockingQueue());
-int perThreadCount = Math.round((float) (bufferCount) / threadCount);
+int perThreadCount = (int)Math.floor((double) (bufferCount) / threadCount);
 int lastThreadCount = bufferCount - (perThreadCount * (threadCount - 1));
 Future[] futures = new Future[threadCount];
 try {
@@ -109,6 +111,11 @@ public final class ByteBufferArray {
 this.buffers[bufferCount] = ByteBuffer.allocate(0);
   }
 
+  @VisibleForTesting
+  int getThreadCount() {
+return Runtime.getRuntime().availableProcessors();
+  }
+
   /**
* A callable that creates buffers of the specified length either 
onheap/offheap using the
* {@link ByteBufferAllocator}

http://git-wip-us.apache.org/repos/asf/hbase/blob/0c49185c/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
index c71b86c..7077643 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
@@ -79,4 +79,43 @@ public class TestByteBufferArray {
   }
 }
   }
+
+  @Test
+  public void testByteBufferCreation1() throws Exception {
+ByteBufferAllocator allocator = new ByteBufferAllocator() {
+  @Override
+  public ByteBuffer allocate(long size, boolean directByteBuffer) throws 
IOException {
+if (directByteBuffer) {
+  return ByteBuffer.allocateDirect((int) size);
+} else {
+  return ByteBuffer.allocate((int) size);
+}
+  }
+};
+ByteBufferArray array = new DummyByteBufferArray(7 * 1024 * 1024, false, 
allocator);
+// overwrite
+array.bufferCount = 25;
+array.buffers = new ByteBuffer[array.bufferCount + 1];
+array.createBuffers(true, allocator);
+for (int i = 0; i < array.buffers.length; i++) {
+  if (i == array.buffers.length - 1) {
+

[2/3] hbase git commit: HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes.

2017-07-20 Thread busbey
HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic 
when configuring hbase.bucketcache.bucket.sizes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bc93b661
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bc93b661
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bc93b661

Branch: refs/heads/HBASE-18147
Commit: bc93b6610b349d38502290af27da0ae0b5fd4936
Parents: 0c49185
Author: anoopsamjohn 
Authored: Thu Jul 20 22:59:06 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Jul 20 22:59:06 2017 +0530

--
 hbase-common/src/main/resources/hbase-default.xml  |  2 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  | 17 +
 .../hadoop/hbase/io/hfile/TestCacheConfig.java | 14 ++
 3 files changed, 28 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bc93b661/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index 27a833f..c4148a1 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -905,7 +905,7 @@ possible configurations would overwhelm and obscure the 
important.
 A comma-separated list of sizes for buckets for the 
bucketcache.
 Can be multiple sizes. List block sizes in order from smallest to largest.
 The sizes you use will depend on your data access patterns.
-Must be a multiple of 1024 else you will run into
+Must be a multiple of 256 else you will run into
 'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
 If you specify no values here, then you pick up the default bucketsizes set
 in code (See BucketAllocator#DEFAULT_BUCKET_SIZES).

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc93b661/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index d514003..9fa0483 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -584,7 +584,8 @@ public class CacheConfig {
* @return Returns L2 block cache instance (for now it is BucketCache 
BlockCache all the time)
* or null if not supposed to be a L2.
*/
-  private static BlockCache getL2(final Configuration c) {
+  @VisibleForTesting
+  static BlockCache getL2(final Configuration c) {
 final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, 
EXTERNAL_BLOCKCACHE_DEFAULT);
 if (LOG.isDebugEnabled()) {
   LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " 
l2 cache");
@@ -594,10 +595,8 @@ public class CacheConfig {
 if (useExternal) {
   return getExternalBlockcache(c);
 }
-
 // otherwise use the bucket cache.
 return getBucketCache(c);
-
   }
 
   private static BlockCache getExternalBlockcache(Configuration c) {
@@ -651,7 +650,17 @@ public class CacheConfig {
 if (configuredBucketSizes != null) {
   bucketSizes = new int[configuredBucketSizes.length];
   for (int i = 0; i < configuredBucketSizes.length; i++) {
-bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
+int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());
+if (bucketSize % 256 != 0) {
+  // We need all the bucket sizes to be multiples of 256. Having all 
the configured bucket
+  // sizes to be multiples of 256 will ensure that the block offsets 
within buckets,
+  // that are calculated, will also be multiples of 256.
+  // See BucketEntry where offset to each block is represented using 5 
bytes (instead of 8
+  // bytes long). We would like to save heap overhead as less as 
possible.
+  throw new IllegalArgumentException("Illegal value: " + bucketSize + 
" configured for '"
+  + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be 
multiples of 256");
+}
+bucketSizes[i] = bucketSize;
   }
 }
 BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc93b661/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
--
diff --git 

[14/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.DumpOptions.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.DumpOptions.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.DumpOptions.html
index 9ec7269..8428049 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.DumpOptions.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.DumpOptions.html
@@ -331,118 +331,121 @@
 323// Loops each peer on each RS and 
dumps the queues
 324try {
 325  ListString regionservers = 
queuesClient.getListOfReplicators();
-326  for (String regionserver : 
regionservers) {
-327ListString queueIds = 
queuesClient.getAllQueues(regionserver);
-328
replicationQueues.init(regionserver);
-329if 
(!liveRegionServers.contains(regionserver)) {
-330  
deadRegionServers.add(regionserver);
-331}
-332for (String queueId : queueIds) 
{
-333  ReplicationQueueInfo queueInfo 
= new ReplicationQueueInfo(queueId);
-334  ListString wals = 
queuesClient.getLogsInQueue(regionserver, queueId);
-335  if 
(!peerIds.contains(queueInfo.getPeerId())) {
-336
deletedQueues.add(regionserver + "/" + queueId);
-337
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, true,
-338  hdfs));
-339  } else {
-340
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, false,
+326  if (regionservers == null || 
regionservers.isEmpty()) {
+327return sb.toString();
+328  }
+329  for (String regionserver : 
regionservers) {
+330ListString queueIds = 
queuesClient.getAllQueues(regionserver);
+331
replicationQueues.init(regionserver);
+332if 
(!liveRegionServers.contains(regionserver)) {
+333  
deadRegionServers.add(regionserver);
+334}
+335for (String queueId : queueIds) 
{
+336  ReplicationQueueInfo queueInfo 
= new ReplicationQueueInfo(queueId);
+337  ListString wals = 
queuesClient.getLogsInQueue(regionserver, queueId);
+338  if 
(!peerIds.contains(queueInfo.getPeerId())) {
+339
deletedQueues.add(regionserver + "/" + queueId);
+340
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, true,
 341  hdfs));
-342  }
-343}
-344  }
-345} catch (KeeperException ke) {
-346  throw new IOException(ke);
-347}
-348return sb.toString();
-349  }
-350
-351  private String formatQueue(String 
regionserver, ReplicationQueues replicationQueues, ReplicationQueueInfo 
queueInfo,
-352   String 
queueId, ListString wals, boolean isDeleted, boolean hdfs) throws 
Exception {
+342  } else {
+343
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, false,
+344  hdfs));
+345  }
+346}
+347  }
+348} catch (KeeperException ke) {
+349  throw new IOException(ke);
+350}
+351return sb.toString();
+352  }
 353
-354StringBuilder sb = new 
StringBuilder();
-355
-356ListString deadServers ;
-357
-358sb.append("Dumping replication queue 
info for RegionServer: [" + regionserver + "]" + "\n");
-359sb.append("Queue znode: " + 
queueId + "\n");
-360sb.append("PeerID: " + 
queueInfo.getPeerId() + "\n");
-361sb.append("Recovered: " + 
queueInfo.isQueueRecovered() + "\n");
-362deadServers = 
queueInfo.getDeadRegionServers();
-363if (deadServers.isEmpty()) {
-364  sb.append("No dead 
RegionServers found in this queue." + "\n");
-365} else {
-366  sb.append("Dead RegionServers: 
" + deadServers + "\n");
-367}
-368sb.append("Was deleted: " + 
isDeleted + "\n");
-369sb.append("Number of WALs in 
replication queue: " + wals.size() + "\n");
-370
peersQueueSize.addAndGet(queueInfo.getPeerId(), wals.size());
-371
-372for (String wal : wals) {
-373  long position = 
replicationQueues.getLogPosition(queueInfo.getPeerId(), wal);
-374  sb.append("Replication position 
for " + wal + ": " + (position  0 ? position : "0"
-375  + " (not started or nothing to 
replicate)") + "\n");
-376}
-377
-378if (hdfs) {
-379  FileSystem fs = 
FileSystem.get(getConf());
-380  sb.append("Total size of WALs 
on HDFS for this queue: "
-381  + 
StringUtils.humanSize(getTotalWALSize(fs, wals, regionserver)) + "\n");
-382}
-383return sb.toString();
-384  }
-385  /**
-386   *  return total size in bytes from a 
list 

[10/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/dependency-info.html
--
diff --git a/hbase-archetypes/dependency-info.html 
b/hbase-archetypes/dependency-info.html
index f276b41..0176558 100644
--- a/hbase-archetypes/dependency-info.html
+++ b/hbase-archetypes/dependency-info.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/dependency-management.html
--
diff --git a/hbase-archetypes/dependency-management.html 
b/hbase-archetypes/dependency-management.html
index 3f623f4..1c8ecf2 100644
--- a/hbase-archetypes/dependency-management.html
+++ b/hbase-archetypes/dependency-management.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/hbase-archetype-builder/dependencies.html
--
diff --git a/hbase-archetypes/hbase-archetype-builder/dependencies.html 
b/hbase-archetypes/hbase-archetype-builder/dependencies.html
index 96f2aee..b02e512 100644
--- a/hbase-archetypes/hbase-archetype-builder/dependencies.html
+++ b/hbase-archetypes/hbase-archetype-builder/dependencies.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetype builder

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
--
diff --git 
a/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html 
b/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
index a32e0d1..7041acd 100644
--- a/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
+++ b/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetype builder

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/hbase-archetype-builder/dependency-info.html
--
diff --git a/hbase-archetypes/hbase-archetype-builder/dependency-info.html 
b/hbase-archetypes/hbase-archetype-builder/dependency-info.html
index 7db338f..c8074ae 100644
--- a/hbase-archetypes/hbase-archetype-builder/dependency-info.html
+++ b/hbase-archetypes/hbase-archetype-builder/dependency-info.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetype builder


[07/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/org/apache/hadoop/hbase/util/package-summary.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/testdevapidocs/org/apache/hadoop/hbase/util/package-summary.html
index 0f4afb7..eecbc04 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -308,304 +308,308 @@
 
 
 
-TestByteBufferUtils
+TestByteBufferArray.DummyByteBufferArray
 
 
 
-TestByteBuffUtils
+TestByteBufferUtils
 
 
 
-TestByteRangeWithKVSerialization
+TestByteBuffUtils
 
 
 
-TestBytes
+TestByteRangeWithKVSerialization
 
 
 
-TestCompressionTest
+TestBytes
 
 
 
-TestConcatenatedLists
+TestCompressionTest
 
 
 
-TestConfigurationUtil
+TestConcatenatedLists
 
 
 
-TestConnectionCache
+TestConfigurationUtil
 
 
 
+TestConnectionCache
+
+
+
 TestCoprocessorClassLoader
 
 Test TestCoprocessorClassLoader.
 
 
-
+
 TestCoprocessorScanPolicy
 
 
-
+
 TestCoprocessorScanPolicy.ScanObserver
 
 
-
+
 TestCounter
 
 
-
+
 TestDefaultEnvironmentEdge
 
 Tests to make sure that the default environment edge 
conforms to appropriate
  behaviour.
 
 
-
+
 TestDrainBarrier
 
 
-
+
 TestDynamicClassLoader
 
 Test TestDynamicClassLoader
 
 
-
+
 TestEncryptionTest
 
 
-
+
 TestEncryptionTest.FailingCipherProvider
 
 
-
+
 TestEncryptionTest.FailingKeyProvider
 
 
-
+
 TestEnvironmentEdgeManager
 
 
-
+
 TestFromClientSide3WoUnsafe
 
 
-
+
 TestFSHDFSUtils
 
 Test our recoverLease loop against mocked up 
filesystem.
 
 
-
+
 TestFSTableDescriptors
 
 Tests for FSTableDescriptors.
 
 
-
+
 TestFSTableDescriptors.FSTableDescriptorsTest
 
 
-
+
 TestFSUtils
 
 Test FSUtils.
 
 
-
+
 TestFSVisitor
 
 Test FSUtils.
 
 
-
+
 TestHBaseFsckComparator
 
 Test the comparator used by Hbck.
 
 
-
+
 TestHBaseFsckEncryption
 
 
-
+
 TestHBaseFsckMOB
 
 
-
+
 TestHBaseFsckOneRS
 
 
-
+
 TestHBaseFsckReplicas
 
 
-
+
 TestHBaseFsckTwoRS
 
 
-
+
 TestHFileArchiveUtil
 
 Test that the utility works as expected
 
 
-
+
 TestIdLock
 
 
-
+
 TestIdReadWriteLock
 
 
-
+
 TestIncrementingEnvironmentEdge
 
 Tests that the incrementing environment edge increments 
time instead of using
  the default.
 
 
-
+
 TestJSONMetricUtil
 
 
-
+
 TestKeyLocker
 
 
-
+
 TestLoadTestKVGenerator
 
 
-
+
 TestMiniClusterLoadEncoded
 
 Runs a load test on a mini HBase cluster with data block 
encoding turned on.
 
 
-
+
 TestMiniClusterLoadParallel
 
 A write/read/verify load test on a mini HBase cluster.
 
 
-
+
 TestMiniClusterLoadSequential
 
 A write/read/verify load test on a mini HBase cluster.
 
 
-
+
 TestOrder
 
 
-
+
 TestOrderedBytes
 
 
-
+
 TestPoolMap
 
 
-
+
 TestPoolMap.TestPoolType
 
 
-
+
 TestPoolMap.TestReusablePoolType
 
 
-
+
 TestPoolMap.TestRoundRobinPoolType
 
 
-
+
 TestPoolMap.TestThreadLocalPoolType
 
 
-
+
 TestRegionMover
 
 Tests for Region Mover Load/Unload functionality with and 
without ack mode and also to test
  exclude functionality useful for rack decommissioning
 
 
-
+
 TestRegionSizeCalculator
 
 
-
+
 TestRegionSplitCalculator
 
 
-
+
 TestRegionSplitCalculator.SimpleRange
 
 This is range uses a user specified start and end 
keys.
 
 
-
+
 TestRegionSplitter
 
 Tests for RegionSplitter, which can create a 
pre-split table or do a
  rolling split of an existing table.
 
 
-
+
 TestRootPath
 
 Test requirement that root directory must be a URI
 
 
-
+
 TestShowProperties
 
 This test is there to dump the properties.
 
 
-
+
 TestSimpleMutableByteRange
 
 
-
+
 TestSimplePositionedMutableByteRange
 
 
-
+
 TestSortedList
 
 
-
+
 TestSortedList.StringComparator
 
 
-
+
 TestStealJobQueue
 
 
-
+
 TestTableName
 
 Returns a byte[] containing the name of the 
currently running test method.
 
 
-
+
 TestThreads
 
 
-
+
 TestVersionInfo
 
 
-
+
 TestWeakObjectPool
 
 
-
+
 TimeOffsetEnvironmentEdge
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 7908019..c16ca4a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -131,6 +131,11 @@
 org.apache.hadoop.hbase.util.BaseTestHBaseFsck.MasterSyncObserver 
(implements org.apache.hadoop.hbase.coprocessor.MasterObserver)
 org.apache.hadoop.hbase.util.BaseTestHBaseFsck.MockErrorReporter (implements 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter)
 org.apache.hadoop.hbase.util.BuilderStyleTest
+org.apache.hadoop.hbase.util.ByteBufferArray
+
+org.apache.hadoop.hbase.util.TestByteBufferArray.DummyByteBufferArray
+
+
 

[01/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 95b5168b3 -> 9e6e3360a


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestByteBufferArray.DummyByteBufferArray.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestByteBufferArray.DummyByteBufferArray.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestByteBufferArray.DummyByteBufferArray.html
new file mode 100644
index 000..59b1c7c
--- /dev/null
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestByteBufferArray.DummyByteBufferArray.html
@@ -0,0 +1,193 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.util;
+019
+020import static 
org.junit.Assert.assertEquals;
+021import static 
org.junit.Assert.assertFalse;
+022import static 
org.junit.Assert.assertTrue;
+023
+024import java.io.IOException;
+025import java.nio.ByteBuffer;
+026
+027import 
org.apache.hadoop.hbase.nio.ByteBuff;
+028import 
org.apache.hadoop.hbase.testclassification.MiscTests;
+029import 
org.apache.hadoop.hbase.testclassification.SmallTests;
+030import org.junit.Test;
+031import 
org.junit.experimental.categories.Category;
+032
+033@Category({MiscTests.class, 
SmallTests.class})
+034public class TestByteBufferArray {
+035
+036  @Test
+037  public void 
testAsSubBufferWhenEndOffsetLandInLastBuffer() throws Exception {
+038int capacity = 4 * 1024 * 1024;
+039ByteBufferAllocator allocator = new 
ByteBufferAllocator() {
+040  @Override
+041  public ByteBuffer allocate(long 
size, boolean directByteBuffer)
+042  throws IOException {
+043if (directByteBuffer) {
+044  return 
ByteBuffer.allocateDirect((int) size);
+045} else {
+046  return 
ByteBuffer.allocate((int) size);
+047}
+048  }
+049};
+050ByteBufferArray array = new 
ByteBufferArray(capacity, false, allocator);
+051ByteBuff subBuf = 
array.asSubByteBuff(0, capacity);
+052subBuf.position(capacity - 1);// 
Position to the last byte
+053assertTrue(subBuf.hasRemaining());
+054// Read last byte
+055subBuf.get();
+056assertFalse(subBuf.hasRemaining());
+057  }
+058
+059  @Test
+060  public void testByteBufferCreation() 
throws Exception {
+061int capacity = 470 * 1021 * 1023;
+062ByteBufferAllocator allocator = new 
ByteBufferAllocator() {
+063  @Override
+064  public ByteBuffer allocate(long 
size, boolean directByteBuffer) throws IOException {
+065if (directByteBuffer) {
+066  return 
ByteBuffer.allocateDirect((int) size);
+067} else {
+068  return 
ByteBuffer.allocate((int) size);
+069}
+070  }
+071};
+072ByteBufferArray array = new 
ByteBufferArray(capacity, false, allocator);
+073assertEquals(119, 
array.buffers.length);
+074for (int i = 0; i  
array.buffers.length; i++) {
+075  if (i == array.buffers.length - 1) 
{
+076
assertEquals(array.buffers[i].capacity(), 0);
+077  } else {
+078
assertEquals(array.buffers[i].capacity(), 
ByteBufferArray.DEFAULT_BUFFER_SIZE);
+079  }
+080}
+081  }
+082
+083  @Test
+084  public void testByteBufferCreation1() 
throws Exception {
+085ByteBufferAllocator allocator = new 
ByteBufferAllocator() {
+086  @Override
+087  public ByteBuffer allocate(long 
size, boolean directByteBuffer) throws IOException {
+088if (directByteBuffer) {
+089  return 
ByteBuffer.allocateDirect((int) size);
+090} else {
+091  return 
ByteBuffer.allocate((int) size);
+092}
+093  }
+094};
+095ByteBufferArray array = new 
DummyByteBufferArray(7 * 1024 * 1024, false, allocator);
+096// overwrite
+097array.bufferCount = 25;
+098array.buffers = new 
ByteBuffer[array.bufferCount + 1];
+099array.createBuffers(true, 
allocator);
+100for (int i = 0; i  
array.buffers.length; i++) {
+101  if (i == array.buffers.length - 1) 

[18/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 381fc68..ff5a5a8 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20170719214846+00'00')
-/CreationDate (D:20170719214846+00'00')
+/ModDate (D:20170720214839+00'00')
+/CreationDate (D:20170720214839+00'00')
 >>
 endobj
 2 0 obj
@@ -27334,7 +27334,7 @@ endobj
 endobj
 136 0 obj
 << /Limits [(__anchor-top) (adding.new.node)]
-/Names [(__anchor-top) 25 0 R (__indexterm-6954596) 3262 0 R 
(__indexterm-6956846) 3264 0 R (__indexterm-6958908) 3265 0 R 
(__indexterm-6960782) 3266 0 R (acid) 891 0 R 
(add-metric-name-and-function-to-hadoop-compat-interface) 3361 0 R 
(add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3362 0 R 
(add.metrics) 3359 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3596 
0 R (adding.new.node) 2859 0 R]
+/Names [(__anchor-top) 25 0 R (__indexterm-6954594) 3262 0 R 
(__indexterm-6956844) 3264 0 R (__indexterm-6958906) 3265 0 R 
(__indexterm-6960780) 3266 0 R (acid) 891 0 R 
(add-metric-name-and-function-to-hadoop-compat-interface) 3361 0 R 
(add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3362 0 R 
(add.metrics) 3359 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3596 
0 R (adding.new.node) 2859 0 R]
 >>
 endobj
 137 0 obj
@@ -46908,7 +46908,7 @@ endobj
 [338 0 R /XYZ 0 152.06 null]
 endobj
 346 0 obj
-<< /Length 8124
+<< /Length 8122
 >>
 stream
 q
@@ -46980,12 +46980,12 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-1.6501 Tw
+2.0694 Tw
 
 BT
 63.24 701.345 Td
 /F1.0 10.5 Tf
-<7061747465726e732e204d7573742062652061206d756c7469706c65206f66203130323420656c736520796f752077696c6c2072756e20696e746f20276a6176612e696f2e494f457863657074696f6e3a20496e76616c6964204846696c65>
 Tj
+<7061747465726e732e204d7573742062652061206d756c7469706c65206f662032353620656c736520796f752077696c6c2072756e20696e746f20276a6176612e696f2e494f457863657074696f6e3a20496e76616c6964204846696c65>
 Tj
 ET
 
 
@@ -755128,4234 +755128,4234 @@ xref
 756640 0 n 
 756687 0 n 
 756733 0 n 
-764911 0 n 
-765288 0 n 
-765334 0 n 
-765381 0 n 
-765569 0 n 
-765616 0 n 
-765663 0 n 
-765710 0 n 
-765756 0 n 
-774781 0 n 
-775166 0 n 
-775213 0 n 
-775806 0 n 
-775853 0 n 
-775900 0 n 
-776614 0 n 
-776661 0 n 
-776888 0 n 
-777114 0 n 
-777161 0 n 
-785878 0 n 
-786271 0 n 
-786466 0 n 
-786513 0 n 
-786708 0 n 
-786902 0 n 
-786949 0 n 
-786996 0 n 
-787043 0 n 
-787090 0 n 
-787842 0 n 
-795332 0 n 
-795691 0 n 
-795738 0 n 
-795785 0 n 
-795832 0 n 
-795879 0 n 
-795926 0 n 
-795972 0 n 
-804609 0 n 
-804968 0 n 
-805015 0 n 
-805062 0 n 
-805109 0 n 
-805156 0 n 
-805201 0 n 
-812975 0 n 
-813334 0 n 
-813382 0 n 
-813430 0 n 
-813478 0 n 
-813526 0 n 
-813574 0 n 
-813621 0 n 
-822102 0 n 
-822461 0 n 
-822508 0 n 
-822555 0 n 
-822602 0 n 
-822649 0 n 
-822694 0 n 
-830137 0 n 
-830496 0 n 
-830543 0 n 
-830590 0 n 
-830637 0 n 
-830684 0 n 
-830731 0 n 
-830778 0 n 
-838696 0 n 
-839055 0 n 
-839103 0 n 
-839151 0 n 
-839199 0 n 
-839247 0 n 
-839740 0 n 
-839788 0 n 
-839836 0 n 
-848036 0 n 
-848395 0 n 
-848443 0 n 
-848491 0 n 
-848539 0 n 
-848587 0 n 
-848635 0 n 
-848683 0 n 
-849464 0 n 
-858359 0 n 
-858718 0 n 
-858765 0 n 
-858812 0 n 
-858859 0 n 
-858906 0 n 
-858953 0 n 
-866497 0 n 
-866856 0 n 
-866903 0 n 
-866950 0 n 
-866997 0 n 
-867044 0 n 
-867091 0 n 
-867138 0 n 
-876112 0 n 
-876513 0 n 
-876559 0 n 
-876606 0 n 
-876653 0 n 
-876900 0 n 
-877147 0 n 
-877194 0 n 
-877443 0 n 
-877692 0 n 
-877739 0 n 
-877785 0 n 
-878439 0 n 
-888598 0 n 
-888971 0 n 
-889147 0 n 
-889195 0 n 
-889243 0 n 
-889291 0 n 
-889339 0 n 
-898594 0 

[12/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.Visitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.Visitor.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.Visitor.html
index 7a442f0..103cd9e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.Visitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.Visitor.html
@@ -52,278 +52,285 @@
 044 * reading/writing data from this large 
buffer with a position and offset
 045 */
 046@InterfaceAudience.Private
-047public final class ByteBufferArray {
+047public class ByteBufferArray {
 048  private static final Log LOG = 
LogFactory.getLog(ByteBufferArray.class);
 049
 050  public static final int 
DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
 051  @VisibleForTesting
 052  ByteBuffer buffers[];
 053  private int bufferSize;
-054  private int bufferCount;
-055
-056  /**
-057   * We allocate a number of byte buffers 
as the capacity. In order not to out
-058   * of the array bounds for the last 
byte(see {@link ByteBufferArray#multiple}),
-059   * we will allocate one additional 
buffer with capacity 0;
-060   * @param capacity total size of the 
byte buffer array
-061   * @param directByteBuffer true if we 
allocate direct buffer
-062   * @param allocator the 
ByteBufferAllocator that will create the buffers
-063   * @throws IOException throws 
IOException if there is an exception thrown by the allocator
-064   */
-065  public ByteBufferArray(long capacity, 
boolean directByteBuffer, ByteBufferAllocator allocator)
-066  throws IOException {
-067this.bufferSize = 
DEFAULT_BUFFER_SIZE;
-068if (this.bufferSize  (capacity / 
16))
-069  this.bufferSize = (int) 
roundUp(capacity / 16, 32768);
-070this.bufferCount = (int) 
(roundUp(capacity, bufferSize) / bufferSize);
-071LOG.info("Allocating buffers total=" 
+ StringUtils.byteDesc(capacity)
-072+ ", sizePerBuffer=" + 
StringUtils.byteDesc(bufferSize) + ", count="
-073+ bufferCount + ", direct=" + 
directByteBuffer);
-074buffers = new ByteBuffer[bufferCount 
+ 1];
-075createBuffers(directByteBuffer, 
allocator);
-076  }
-077
-078  private void createBuffers(boolean 
directByteBuffer, ByteBufferAllocator allocator)
-079  throws IOException {
-080int threadCount = 
Runtime.getRuntime().availableProcessors();
-081ExecutorService service = new 
ThreadPoolExecutor(threadCount, threadCount, 0L,
-082TimeUnit.MILLISECONDS, new 
LinkedBlockingQueueRunnable());
-083int perThreadCount = 
Math.round((float) (bufferCount) / threadCount);
-084int lastThreadCount = bufferCount - 
(perThreadCount * (threadCount - 1));
-085FutureByteBuffer[][] futures 
= new Future[threadCount];
-086try {
-087  for (int i = 0; i  threadCount; 
i++) {
-088// Last thread will have to deal 
with a different number of buffers
-089int buffersToCreate = (i == 
threadCount - 1) ? lastThreadCount : perThreadCount;
-090futures[i] = service.submit(
-091  new 
BufferCreatorCallable(bufferSize, directByteBuffer, buffersToCreate, 
allocator));
-092  }
-093  int bufferIndex = 0;
-094  for (FutureByteBuffer[] 
future : futures) {
-095try {
-096  ByteBuffer[] buffers = 
future.get();
-097  for (ByteBuffer buffer : 
buffers) {
-098this.buffers[bufferIndex++] = 
buffer;
-099  }
-100} catch (InterruptedException | 
ExecutionException e) {
-101  LOG.error("Buffer creation 
interrupted", e);
-102  throw new IOException(e);
-103}
-104  }
-105} finally {
-106  service.shutdownNow();
-107}
-108// always create on heap empty dummy 
buffer at last
-109this.buffers[bufferCount] = 
ByteBuffer.allocate(0);
-110  }
-111
-112  /**
-113   * A callable that creates buffers of 
the specified length either onheap/offheap using the
-114   * {@link ByteBufferAllocator}
-115   */
-116  private static class 
BufferCreatorCallable implements CallableByteBuffer[] {
-117private final int bufferCapacity;
-118private final boolean 
directByteBuffer;
-119private final int bufferCount;
-120private final ByteBufferAllocator 
allocator;
-121
-122BufferCreatorCallable(int 
bufferCapacity, boolean directByteBuffer, int bufferCount,
-123ByteBufferAllocator allocator) 
{
-124  this.bufferCapacity = 
bufferCapacity;
-125  this.directByteBuffer = 
directByteBuffer;
-126  this.bufferCount = bufferCount;
-127  this.allocator = allocator;
-128}
-129
-130@Override
-131public ByteBuffer[] call() throws 
Exception {
-132  ByteBuffer[] buffers = new 
ByteBuffer[this.bufferCount];
-133  for (int i = 0; i  
this.bufferCount; i++) {
-134buffers[i] = 

[16/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index c54199a..54ada95 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -125,10 +125,10 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult
 org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteCompare
-org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
+org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult
 org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode
+org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 33cb6b6..7cba16c 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -248,8 +248,8 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrResult
 org.apache.hadoop.hbase.regionserver.wal.RingBufferTruck.Type
+org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrResult
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.WarnOnlyAbortable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.WarnOnlyAbortable.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.WarnOnlyAbortable.html
index da67f01..5ddd142 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.WarnOnlyAbortable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.WarnOnlyAbortable.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class DumpReplicationQueues.WarnOnlyAbortable
+private static class DumpReplicationQueues.WarnOnlyAbortable
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements Abortable
 
@@ -198,7 +198,7 @@ implements 
 
 WarnOnlyAbortable
-privateWarnOnlyAbortable()
+privateWarnOnlyAbortable()
 
 
 
@@ -215,7 +215,7 @@ implements 
 
 abort
-publicvoidabort(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringwhy,
+publicvoidabort(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringwhy,
   http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwablee)
 Description copied from 
interface:Abortable
 Abort the server or client.
@@ -234,7 +234,7 @@ implements 
 
 isAborted
-publicbooleanisAborted()
+publicbooleanisAborted()
 Description copied from 
interface:Abortable
 Check if the server or client was aborted.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.WarnOnlyStoppable.html

[04/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
index c6abfe5..3734517 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
@@ -29,406 +29,420 @@
 021import static 
org.junit.Assert.assertEquals;
 022import static 
org.junit.Assert.assertFalse;
 023import static 
org.junit.Assert.assertTrue;
-024
-025import java.io.IOException;
-026import 
java.lang.management.ManagementFactory;
-027import 
java.lang.management.MemoryUsage;
-028import java.nio.ByteBuffer;
-029import java.util.Map;
-030
-031import org.apache.commons.logging.Log;
-032import 
org.apache.commons.logging.LogFactory;
-033import 
org.apache.hadoop.conf.Configuration;
-034import org.apache.hadoop.fs.FileSystem;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.HBaseConfiguration;
-037import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-038import 
org.apache.hadoop.hbase.HColumnDescriptor;
-039import 
org.apache.hadoop.hbase.HConstants;
-040import 
org.apache.hadoop.hbase.testclassification.IOTests;
-041import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-042import 
org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
-043import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-044import 
org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
-045import 
org.apache.hadoop.hbase.io.util.MemorySizeUtil;
-046import 
org.apache.hadoop.hbase.nio.ByteBuff;
-047import 
org.apache.hadoop.hbase.util.Threads;
-048import org.junit.After;
-049import org.junit.Before;
-050import org.junit.Test;
-051import 
org.junit.experimental.categories.Category;
-052
-053/**
-054 * Tests that {@link CacheConfig} does as 
expected.
-055 */
-056// This test is marked as a large test 
though it runs in a short amount of time
-057// (seconds).  It is large because it 
depends on being able to reset the global
-058// blockcache instance which is in a 
global variable.  Experience has it that
-059// tests clash on the global variable if 
this test is run as small sized test.
-060@Category({IOTests.class, 
LargeTests.class})
-061public class TestCacheConfig {
-062  private static final Log LOG = 
LogFactory.getLog(TestCacheConfig.class);
-063  private Configuration conf;
-064
-065  static class Deserializer implements 
CacheableDeserializerCacheable {
-066private final Cacheable cacheable;
-067private int deserializedIdentifier = 
0;
-068
-069Deserializer(final Cacheable c) {
-070  deserializedIdentifier = 
CacheableDeserializerIdManager.registerDeserializer(this);
-071  this.cacheable = c;
-072}
-073
-074@Override
-075public int 
getDeserialiserIdentifier() {
-076  return deserializedIdentifier;
-077}
-078
-079@Override
-080public Cacheable deserialize(ByteBuff 
b, boolean reuse, MemoryType memType) throws IOException {
-081  LOG.info("Deserialized " + b + ", 
reuse=" + reuse);
-082  return cacheable;
-083}
-084
-085@Override
-086public Cacheable deserialize(ByteBuff 
b) throws IOException {
-087  LOG.info("Deserialized " + b);
-088  return cacheable;
-089}
-090  };
-091
-092  static class IndexCacheEntry extends 
DataCacheEntry {
-093private static IndexCacheEntry 
SINGLETON = new IndexCacheEntry();
-094
-095public IndexCacheEntry() {
-096  super(SINGLETON);
-097}
-098
-099@Override
-100public BlockType getBlockType() {
-101  return BlockType.ROOT_INDEX;
-102}
-103  }
-104
-105  static class DataCacheEntry implements 
Cacheable {
-106private static final int SIZE = 1;
-107private static DataCacheEntry 
SINGLETON = new DataCacheEntry();
-108final 
CacheableDeserializerCacheable deserializer;
-109
-110DataCacheEntry() {
-111  this(SINGLETON);
-112}
-113
-114DataCacheEntry(final Cacheable c) {
-115  this.deserializer = new 
Deserializer(c);
-116}
-117
-118@Override
-119public String toString() {
-120  return "size=" + SIZE + ", type=" + 
getBlockType();
-121};
-122
-123@Override
-124public long heapSize() {
-125  return SIZE;
-126}
-127
-128@Override
-129public int getSerializedLength() {
-130  return SIZE;
-131}
-132
-133@Override
-134public void serialize(ByteBuffer 
destination) {
-135  LOG.info("Serialized " + this + " 
to " + destination);
-136}
-137
-138@Override
-139public 
CacheableDeserializerCacheable getDeserializer() {
-140  return this.deserializer;
-141}
-142
-143   

[08/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
 
b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
index 9d284a3..cfca4f2 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class TestCacheConfig.DataCacheEntry
+static class TestCacheConfig.DataCacheEntry
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
@@ -260,7 +260,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 SIZE
-private static finalint SIZE
+private static finalint SIZE
 
 See Also:
 Constant
 Field Values
@@ -273,7 +273,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 SINGLETON
-private staticTestCacheConfig.DataCacheEntry SINGLETON
+private staticTestCacheConfig.DataCacheEntry SINGLETON
 
 
 
@@ -282,7 +282,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 deserializer
-finalorg.apache.hadoop.hbase.io.hfile.CacheableDeserializerorg.apache.hadoop.hbase.io.hfile.Cacheable
 deserializer
+finalorg.apache.hadoop.hbase.io.hfile.CacheableDeserializerorg.apache.hadoop.hbase.io.hfile.Cacheable
 deserializer
 
 
 
@@ -299,7 +299,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 DataCacheEntry
-DataCacheEntry()
+DataCacheEntry()
 
 
 
@@ -308,7 +308,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 DataCacheEntry
-DataCacheEntry(org.apache.hadoop.hbase.io.hfile.Cacheablec)
+DataCacheEntry(org.apache.hadoop.hbase.io.hfile.Cacheablec)
 
 
 
@@ -325,7 +325,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 toString
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 
 Overrides:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 
classhttp://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
@@ -338,7 +338,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Specified by:
 heapSizein 
interfaceorg.apache.hadoop.hbase.io.HeapSize
@@ -351,7 +351,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 getSerializedLength
-publicintgetSerializedLength()
+publicintgetSerializedLength()
 
 Specified by:
 getSerializedLengthin 
interfaceorg.apache.hadoop.hbase.io.hfile.Cacheable
@@ -364,7 +364,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 serialize
-publicvoidserialize(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferdestination)
+publicvoidserialize(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferdestination)
 
 Specified by:
 serializein 
interfaceorg.apache.hadoop.hbase.io.hfile.Cacheable
@@ -377,7 +377,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 getDeserializer
-publicorg.apache.hadoop.hbase.io.hfile.CacheableDeserializerorg.apache.hadoop.hbase.io.hfile.CacheablegetDeserializer()
+publicorg.apache.hadoop.hbase.io.hfile.CacheableDeserializerorg.apache.hadoop.hbase.io.hfile.CacheablegetDeserializer()
 
 Specified by:
 getDeserializerin 
interfaceorg.apache.hadoop.hbase.io.hfile.Cacheable
@@ -390,7 +390,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 getBlockType
-publicorg.apache.hadoop.hbase.io.hfile.BlockTypegetBlockType()
+publicorg.apache.hadoop.hbase.io.hfile.BlockTypegetBlockType()
 
 Specified by:
 getBlockTypein 
interfaceorg.apache.hadoop.hbase.io.hfile.Cacheable
@@ -403,7 +403,7 @@ implements org.apache.hadoop.hbase.io.hfile.Cacheable
 
 
 getMemoryType
-publicorg.apache.hadoop.hbase.io.hfile.Cacheable.MemoryTypegetMemoryType()
+publicorg.apache.hadoop.hbase.io.hfile.Cacheable.MemoryTypegetMemoryType()
 
 Specified by:
 getMemoryTypein 
interfaceorg.apache.hadoop.hbase.io.hfile.Cacheable

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
--
diff --git 

[03/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
index c6abfe5..3734517 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
@@ -29,406 +29,420 @@
 021import static 
org.junit.Assert.assertEquals;
 022import static 
org.junit.Assert.assertFalse;
 023import static 
org.junit.Assert.assertTrue;
-024
-025import java.io.IOException;
-026import 
java.lang.management.ManagementFactory;
-027import 
java.lang.management.MemoryUsage;
-028import java.nio.ByteBuffer;
-029import java.util.Map;
-030
-031import org.apache.commons.logging.Log;
-032import 
org.apache.commons.logging.LogFactory;
-033import 
org.apache.hadoop.conf.Configuration;
-034import org.apache.hadoop.fs.FileSystem;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.HBaseConfiguration;
-037import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-038import 
org.apache.hadoop.hbase.HColumnDescriptor;
-039import 
org.apache.hadoop.hbase.HConstants;
-040import 
org.apache.hadoop.hbase.testclassification.IOTests;
-041import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-042import 
org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
-043import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-044import 
org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
-045import 
org.apache.hadoop.hbase.io.util.MemorySizeUtil;
-046import 
org.apache.hadoop.hbase.nio.ByteBuff;
-047import 
org.apache.hadoop.hbase.util.Threads;
-048import org.junit.After;
-049import org.junit.Before;
-050import org.junit.Test;
-051import 
org.junit.experimental.categories.Category;
-052
-053/**
-054 * Tests that {@link CacheConfig} does as 
expected.
-055 */
-056// This test is marked as a large test 
though it runs in a short amount of time
-057// (seconds).  It is large because it 
depends on being able to reset the global
-058// blockcache instance which is in a 
global variable.  Experience has it that
-059// tests clash on the global variable if 
this test is run as small sized test.
-060@Category({IOTests.class, 
LargeTests.class})
-061public class TestCacheConfig {
-062  private static final Log LOG = 
LogFactory.getLog(TestCacheConfig.class);
-063  private Configuration conf;
-064
-065  static class Deserializer implements 
CacheableDeserializerCacheable {
-066private final Cacheable cacheable;
-067private int deserializedIdentifier = 
0;
-068
-069Deserializer(final Cacheable c) {
-070  deserializedIdentifier = 
CacheableDeserializerIdManager.registerDeserializer(this);
-071  this.cacheable = c;
-072}
-073
-074@Override
-075public int 
getDeserialiserIdentifier() {
-076  return deserializedIdentifier;
-077}
-078
-079@Override
-080public Cacheable deserialize(ByteBuff 
b, boolean reuse, MemoryType memType) throws IOException {
-081  LOG.info("Deserialized " + b + ", 
reuse=" + reuse);
-082  return cacheable;
-083}
-084
-085@Override
-086public Cacheable deserialize(ByteBuff 
b) throws IOException {
-087  LOG.info("Deserialized " + b);
-088  return cacheable;
-089}
-090  };
-091
-092  static class IndexCacheEntry extends 
DataCacheEntry {
-093private static IndexCacheEntry 
SINGLETON = new IndexCacheEntry();
-094
-095public IndexCacheEntry() {
-096  super(SINGLETON);
-097}
-098
-099@Override
-100public BlockType getBlockType() {
-101  return BlockType.ROOT_INDEX;
-102}
-103  }
-104
-105  static class DataCacheEntry implements 
Cacheable {
-106private static final int SIZE = 1;
-107private static DataCacheEntry 
SINGLETON = new DataCacheEntry();
-108final 
CacheableDeserializerCacheable deserializer;
-109
-110DataCacheEntry() {
-111  this(SINGLETON);
-112}
-113
-114DataCacheEntry(final Cacheable c) {
-115  this.deserializer = new 
Deserializer(c);
-116}
-117
-118@Override
-119public String toString() {
-120  return "size=" + SIZE + ", type=" + 
getBlockType();
-121};
-122
-123@Override
-124public long heapSize() {
-125  return SIZE;
-126}
-127
-128@Override
-129public int getSerializedLength() {
-130  return SIZE;
-131}
-132
-133@Override
-134public void serialize(ByteBuffer 
destination) {
-135  LOG.info("Serialized " + this + " 
to " + destination);
-136}
-137
-138@Override
-139public 
CacheableDeserializerCacheable getDeserializer() {
-140  return this.deserializer;
-141}
-142
-143

[11/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.html
index 7a442f0..103cd9e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferArray.html
@@ -52,278 +52,285 @@
 044 * reading/writing data from this large 
buffer with a position and offset
 045 */
 046@InterfaceAudience.Private
-047public final class ByteBufferArray {
+047public class ByteBufferArray {
 048  private static final Log LOG = 
LogFactory.getLog(ByteBufferArray.class);
 049
 050  public static final int 
DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
 051  @VisibleForTesting
 052  ByteBuffer buffers[];
 053  private int bufferSize;
-054  private int bufferCount;
-055
-056  /**
-057   * We allocate a number of byte buffers 
as the capacity. In order not to out
-058   * of the array bounds for the last 
byte(see {@link ByteBufferArray#multiple}),
-059   * we will allocate one additional 
buffer with capacity 0;
-060   * @param capacity total size of the 
byte buffer array
-061   * @param directByteBuffer true if we 
allocate direct buffer
-062   * @param allocator the 
ByteBufferAllocator that will create the buffers
-063   * @throws IOException throws 
IOException if there is an exception thrown by the allocator
-064   */
-065  public ByteBufferArray(long capacity, 
boolean directByteBuffer, ByteBufferAllocator allocator)
-066  throws IOException {
-067this.bufferSize = 
DEFAULT_BUFFER_SIZE;
-068if (this.bufferSize  (capacity / 
16))
-069  this.bufferSize = (int) 
roundUp(capacity / 16, 32768);
-070this.bufferCount = (int) 
(roundUp(capacity, bufferSize) / bufferSize);
-071LOG.info("Allocating buffers total=" 
+ StringUtils.byteDesc(capacity)
-072+ ", sizePerBuffer=" + 
StringUtils.byteDesc(bufferSize) + ", count="
-073+ bufferCount + ", direct=" + 
directByteBuffer);
-074buffers = new ByteBuffer[bufferCount 
+ 1];
-075createBuffers(directByteBuffer, 
allocator);
-076  }
-077
-078  private void createBuffers(boolean 
directByteBuffer, ByteBufferAllocator allocator)
-079  throws IOException {
-080int threadCount = 
Runtime.getRuntime().availableProcessors();
-081ExecutorService service = new 
ThreadPoolExecutor(threadCount, threadCount, 0L,
-082TimeUnit.MILLISECONDS, new 
LinkedBlockingQueueRunnable());
-083int perThreadCount = 
Math.round((float) (bufferCount) / threadCount);
-084int lastThreadCount = bufferCount - 
(perThreadCount * (threadCount - 1));
-085FutureByteBuffer[][] futures 
= new Future[threadCount];
-086try {
-087  for (int i = 0; i  threadCount; 
i++) {
-088// Last thread will have to deal 
with a different number of buffers
-089int buffersToCreate = (i == 
threadCount - 1) ? lastThreadCount : perThreadCount;
-090futures[i] = service.submit(
-091  new 
BufferCreatorCallable(bufferSize, directByteBuffer, buffersToCreate, 
allocator));
-092  }
-093  int bufferIndex = 0;
-094  for (FutureByteBuffer[] 
future : futures) {
-095try {
-096  ByteBuffer[] buffers = 
future.get();
-097  for (ByteBuffer buffer : 
buffers) {
-098this.buffers[bufferIndex++] = 
buffer;
-099  }
-100} catch (InterruptedException | 
ExecutionException e) {
-101  LOG.error("Buffer creation 
interrupted", e);
-102  throw new IOException(e);
-103}
-104  }
-105} finally {
-106  service.shutdownNow();
-107}
-108// always create on heap empty dummy 
buffer at last
-109this.buffers[bufferCount] = 
ByteBuffer.allocate(0);
-110  }
-111
-112  /**
-113   * A callable that creates buffers of 
the specified length either onheap/offheap using the
-114   * {@link ByteBufferAllocator}
-115   */
-116  private static class 
BufferCreatorCallable implements CallableByteBuffer[] {
-117private final int bufferCapacity;
-118private final boolean 
directByteBuffer;
-119private final int bufferCount;
-120private final ByteBufferAllocator 
allocator;
-121
-122BufferCreatorCallable(int 
bufferCapacity, boolean directByteBuffer, int bufferCount,
-123ByteBufferAllocator allocator) 
{
-124  this.bufferCapacity = 
bufferCapacity;
-125  this.directByteBuffer = 
directByteBuffer;
-126  this.bufferCount = bufferCount;
-127  this.allocator = allocator;
-128}
-129
-130@Override
-131public ByteBuffer[] call() throws 
Exception {
-132  ByteBuffer[] buffers = new 
ByteBuffer[this.bufferCount];
-133  for (int i = 0; i  
this.bufferCount; i++) {
-134buffers[i] = 
allocator.allocate(this.bufferCapacity, 

hbase-site git commit: INFRA-10751 Empty commit

2017-07-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 9e6e3360a -> be6740763


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/be674076
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/be674076
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/be674076

Branch: refs/heads/asf-site
Commit: be674076369811ccd6298aa9f453342653fece63
Parents: 9e6e336
Author: jenkins 
Authored: Thu Jul 20 22:07:16 2017 +
Committer: jenkins 
Committed: Thu Jul 20 22:07:16 2017 +

--

--




[17/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/book.html
--
diff --git a/book.html b/book.html
index 5123827..6efd503 100644
--- a/book.html
+++ b/book.html
@@ -3585,7 +3585,7 @@ Some configurations would only appear in source code; the 
only way to identify t
 
 
 Description
-A comma-separated list of sizes for buckets for the bucketcache. Can be 
multiple sizes. List block sizes in order from smallest to largest. The sizes 
you use will depend on your data access patterns. Must be a multiple of 1024 
else you will run into 'java.io.IOException: Invalid HFile block magic' when 
you go to read from cache. If you specify no values here, then you pick up the 
default bucketsizes set in code (See BucketAllocator#DEFAULT_BUCKET_SIZES).
+A comma-separated list of sizes for buckets for the bucketcache. Can be 
multiple sizes. List block sizes in order from smallest to largest. The sizes 
you use will depend on your data access patterns. Must be a multiple of 256 
else you will run into 'java.io.IOException: Invalid HFile block magic' when 
you go to read from cache. If you specify no values here, then you pick up the 
default bucketsizes set in code (See BucketAllocator#DEFAULT_BUCKET_SIZES).
 
 
 Default

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index a7b731d..7ce8138 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-19
+  Last Published: 
2017-07-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 50bd7e0..163159c 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -7381,12 +7381,12 @@
 http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation;>JavadocTagContinuationIndentation
 
 offset: 2
-797
+803
 Error
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription;>NonEmptyAtclauseDescription
-3259
+3253
 Error
 
 misc
@@ -13242,7 +13242,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 43 has parse error. Missed HTML close tag 
'TableName'. Sometimes it means that close tag missed for one of previous 
tags.
 181
 
@@ -16926,7 +16926,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 64 has parse error. Missed HTML close tag 
'code'. Sometimes it means that close tag missed for one of previous tags.
 2153
 
@@ -22380,7 +22380,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 37 has parse error. Details: no viable 
alternative at input 'ColumnFamily,' while parsing HTML_ELEMENT
 29
 
@@ -32291,31 +32291,31 @@
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-632
+631
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-680
+689
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-681
+690
 
 Error
 sizes
 LineLength
 Line is longer than 100 characters (found 103).
-683
+692
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-684
+693
 
 org/apache/hadoop/hbase/io/hfile/CacheStats.java
 
@@ -48305,31 +48305,31 @@
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-104
+107
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-136
+139
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-156
+159
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-180
+183
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-195
+198
 
 org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
 
@@ -52842,7 +52842,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 60 has parse error. Missed HTML close tag 
'Comparable'. Sometimes it means that close tag missed for one of previous 
tags.
 28
 
@@ -55773,7 +55773,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 0 has parse error. Unrecognized error 

[15/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
index a31c60b..f04095f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
@@ -592,136 +592,145 @@
 584   * @return Returns L2 block cache 
instance (for now it is BucketCache BlockCache all the time)
 585   * or null if not supposed to be a 
L2.
 586   */
-587  private static BlockCache getL2(final 
Configuration c) {
-588final boolean useExternal = 
c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);
-589if (LOG.isDebugEnabled()) {
-590  LOG.debug("Trying to use " + 
(useExternal?" External":" Internal") + " l2 cache");
-591}
-592
-593// If we want to use an external 
block cache then create that.
-594if (useExternal) {
-595  return getExternalBlockcache(c);
-596}
-597
+587  @VisibleForTesting
+588  static BlockCache getL2(final 
Configuration c) {
+589final boolean useExternal = 
c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);
+590if (LOG.isDebugEnabled()) {
+591  LOG.debug("Trying to use " + 
(useExternal?" External":" Internal") + " l2 cache");
+592}
+593
+594// If we want to use an external 
block cache then create that.
+595if (useExternal) {
+596  return getExternalBlockcache(c);
+597}
 598// otherwise use the bucket cache.
 599return getBucketCache(c);
-600
-601  }
-602
-603  private static BlockCache 
getExternalBlockcache(Configuration c) {
-604Class klass = null;
-605
-606// Get the class, from the config. 
s
-607try {
-608  klass = 
ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, 
"memcache")).clazz;
-609} catch (IllegalArgumentException 
exception) {
-610  try {
-611klass = 
c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(
-612
"org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));
-613  } catch (ClassNotFoundException e) 
{
-614return null;
-615  }
-616}
-617
-618// Now try and create an instance of 
the block cache.
-619try {
-620  LOG.info("Creating external block 
cache of type: " + klass);
-621  return (BlockCache) 
ReflectionUtils.newInstance(klass, c);
-622} catch (Exception e) {
-623  LOG.warn("Error creating external 
block cache", e);
-624}
-625return null;
-626
-627  }
-628
-629  private static BlockCache 
getBucketCache(Configuration c) {
-630// Check for L2.  ioengine name must 
be non-null.
-631String bucketCacheIOEngineName = 
c.get(BUCKET_CACHE_IOENGINE_KEY, null);
-632if (bucketCacheIOEngineName == null 
|| bucketCacheIOEngineName.length() = 0) return null;
-633
-634int blockSize = 
c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);
-635final long bucketCacheSize = 
MemorySizeUtil.getBucketCacheSize(c);
-636if (bucketCacheSize = 0) {
-637  throw new 
IllegalStateException("bucketCacheSize = 0; Check " +
-638BUCKET_CACHE_SIZE_KEY + " setting 
and/or server java heap size");
-639}
-640if 
(c.get("hbase.bucketcache.percentage.in.combinedcache") != null) {
-641  LOG.warn("Configuration 
'hbase.bucketcache.percentage.in.combinedcache' is no longer "
-642  + "respected. See comments in 
http://hbase.apache.org/book.html#_changes_of_note;);
-643}
-644int writerThreads = 
c.getInt(BUCKET_CACHE_WRITER_THREADS_KEY,
-645  
DEFAULT_BUCKET_CACHE_WRITER_THREADS);
-646int writerQueueLen = 
c.getInt(BUCKET_CACHE_WRITER_QUEUE_KEY,
-647  
DEFAULT_BUCKET_CACHE_WRITER_QUEUE);
-648String persistentPath = 
c.get(BUCKET_CACHE_PERSISTENT_PATH_KEY);
-649String[] configuredBucketSizes = 
c.getStrings(BUCKET_CACHE_BUCKETS_KEY);
-650int [] bucketSizes = null;
-651if (configuredBucketSizes != null) 
{
-652  bucketSizes = new 
int[configuredBucketSizes.length];
-653  for (int i = 0; i  
configuredBucketSizes.length; i++) {
-654bucketSizes[i] = 
Integer.parseInt(configuredBucketSizes[i].trim());
-655  }
-656}
-657BucketCache bucketCache = null;
-658try {
-659  int ioErrorsTolerationDuration = 
c.getInt(
-660
"hbase.bucketcache.ioengine.errors.tolerated.duration",
-661
BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);
-662  // Bucket cache logs its stats on 
creation internal to the constructor.
-663  bucketCache = new 
BucketCache(bucketCacheIOEngineName,
-664bucketCacheSize, blockSize, 
bucketSizes, writerThreads, writerQueueLen, persistentPath,
-665ioErrorsTolerationDuration);
-666} catch (IOException ioex) {
-667  

[09/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/hbase-shaded-client-project/source-repository.html
--
diff --git 
a/hbase-archetypes/hbase-shaded-client-project/source-repository.html 
b/hbase-archetypes/hbase-shaded-client-project/source-repository.html
index fba74ab..617f060 100644
--- a/hbase-archetypes/hbase-shaded-client-project/source-repository.html
+++ b/hbase-archetypes/hbase-shaded-client-project/source-repository.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/hbase-shaded-client-project/team-list.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/team-list.html 
b/hbase-archetypes/hbase-shaded-client-project/team-list.html
index 80687d6..111afc5 100644
--- a/hbase-archetypes/hbase-shaded-client-project/team-list.html
+++ b/hbase-archetypes/hbase-shaded-client-project/team-list.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/index.html
--
diff --git a/hbase-archetypes/index.html b/hbase-archetypes/index.html
index 11d33ec..4ed2a46 100644
--- a/hbase-archetypes/index.html
+++ b/hbase-archetypes/index.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/integration.html
--
diff --git a/hbase-archetypes/integration.html 
b/hbase-archetypes/integration.html
index 3858a64..d1269b5 100644
--- a/hbase-archetypes/integration.html
+++ b/hbase-archetypes/integration.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/issue-tracking.html
--
diff --git a/hbase-archetypes/issue-tracking.html 
b/hbase-archetypes/issue-tracking.html
index 49ea3c7..9e4630e 100644
--- a/hbase-archetypes/issue-tracking.html
+++ b/hbase-archetypes/issue-tracking.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-19
+Last Published: 2017-07-20
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/hbase-archetypes/license.html
--
diff --git a/hbase-archetypes/license.html 

[05/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
index c6abfe5..3734517 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
@@ -29,406 +29,420 @@
 021import static 
org.junit.Assert.assertEquals;
 022import static 
org.junit.Assert.assertFalse;
 023import static 
org.junit.Assert.assertTrue;
-024
-025import java.io.IOException;
-026import 
java.lang.management.ManagementFactory;
-027import 
java.lang.management.MemoryUsage;
-028import java.nio.ByteBuffer;
-029import java.util.Map;
-030
-031import org.apache.commons.logging.Log;
-032import 
org.apache.commons.logging.LogFactory;
-033import 
org.apache.hadoop.conf.Configuration;
-034import org.apache.hadoop.fs.FileSystem;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.HBaseConfiguration;
-037import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-038import 
org.apache.hadoop.hbase.HColumnDescriptor;
-039import 
org.apache.hadoop.hbase.HConstants;
-040import 
org.apache.hadoop.hbase.testclassification.IOTests;
-041import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-042import 
org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
-043import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-044import 
org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
-045import 
org.apache.hadoop.hbase.io.util.MemorySizeUtil;
-046import 
org.apache.hadoop.hbase.nio.ByteBuff;
-047import 
org.apache.hadoop.hbase.util.Threads;
-048import org.junit.After;
-049import org.junit.Before;
-050import org.junit.Test;
-051import 
org.junit.experimental.categories.Category;
-052
-053/**
-054 * Tests that {@link CacheConfig} does as 
expected.
-055 */
-056// This test is marked as a large test 
though it runs in a short amount of time
-057// (seconds).  It is large because it 
depends on being able to reset the global
-058// blockcache instance which is in a 
global variable.  Experience has it that
-059// tests clash on the global variable if 
this test is run as small sized test.
-060@Category({IOTests.class, 
LargeTests.class})
-061public class TestCacheConfig {
-062  private static final Log LOG = 
LogFactory.getLog(TestCacheConfig.class);
-063  private Configuration conf;
-064
-065  static class Deserializer implements 
CacheableDeserializerCacheable {
-066private final Cacheable cacheable;
-067private int deserializedIdentifier = 
0;
-068
-069Deserializer(final Cacheable c) {
-070  deserializedIdentifier = 
CacheableDeserializerIdManager.registerDeserializer(this);
-071  this.cacheable = c;
-072}
-073
-074@Override
-075public int 
getDeserialiserIdentifier() {
-076  return deserializedIdentifier;
-077}
-078
-079@Override
-080public Cacheable deserialize(ByteBuff 
b, boolean reuse, MemoryType memType) throws IOException {
-081  LOG.info("Deserialized " + b + ", 
reuse=" + reuse);
-082  return cacheable;
-083}
-084
-085@Override
-086public Cacheable deserialize(ByteBuff 
b) throws IOException {
-087  LOG.info("Deserialized " + b);
-088  return cacheable;
-089}
-090  };
-091
-092  static class IndexCacheEntry extends 
DataCacheEntry {
-093private static IndexCacheEntry 
SINGLETON = new IndexCacheEntry();
-094
-095public IndexCacheEntry() {
-096  super(SINGLETON);
-097}
-098
-099@Override
-100public BlockType getBlockType() {
-101  return BlockType.ROOT_INDEX;
-102}
-103  }
-104
-105  static class DataCacheEntry implements 
Cacheable {
-106private static final int SIZE = 1;
-107private static DataCacheEntry 
SINGLETON = new DataCacheEntry();
-108final 
CacheableDeserializerCacheable deserializer;
-109
-110DataCacheEntry() {
-111  this(SINGLETON);
-112}
-113
-114DataCacheEntry(final Cacheable c) {
-115  this.deserializer = new 
Deserializer(c);
-116}
-117
-118@Override
-119public String toString() {
-120  return "size=" + SIZE + ", type=" + 
getBlockType();
-121};
-122
-123@Override
-124public long heapSize() {
-125  return SIZE;
-126}
-127
-128@Override
-129public int getSerializedLength() {
-130  return SIZE;
-131}
-132
-133@Override
-134public void serialize(ByteBuffer 
destination) {
-135  LOG.info("Serialized " + this + " 
to " + destination);
-136}
-137
-138@Override
-139public 
CacheableDeserializerCacheable getDeserializer() {
-140  return this.deserializer;
-141}
-142
-143@Override

[06/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
index c6abfe5..3734517 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
@@ -29,406 +29,420 @@
 021import static 
org.junit.Assert.assertEquals;
 022import static 
org.junit.Assert.assertFalse;
 023import static 
org.junit.Assert.assertTrue;
-024
-025import java.io.IOException;
-026import 
java.lang.management.ManagementFactory;
-027import 
java.lang.management.MemoryUsage;
-028import java.nio.ByteBuffer;
-029import java.util.Map;
-030
-031import org.apache.commons.logging.Log;
-032import 
org.apache.commons.logging.LogFactory;
-033import 
org.apache.hadoop.conf.Configuration;
-034import org.apache.hadoop.fs.FileSystem;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.HBaseConfiguration;
-037import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-038import 
org.apache.hadoop.hbase.HColumnDescriptor;
-039import 
org.apache.hadoop.hbase.HConstants;
-040import 
org.apache.hadoop.hbase.testclassification.IOTests;
-041import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-042import 
org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
-043import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-044import 
org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
-045import 
org.apache.hadoop.hbase.io.util.MemorySizeUtil;
-046import 
org.apache.hadoop.hbase.nio.ByteBuff;
-047import 
org.apache.hadoop.hbase.util.Threads;
-048import org.junit.After;
-049import org.junit.Before;
-050import org.junit.Test;
-051import 
org.junit.experimental.categories.Category;
-052
-053/**
-054 * Tests that {@link CacheConfig} does as 
expected.
-055 */
-056// This test is marked as a large test 
though it runs in a short amount of time
-057// (seconds).  It is large because it 
depends on being able to reset the global
-058// blockcache instance which is in a 
global variable.  Experience has it that
-059// tests clash on the global variable if 
this test is run as small sized test.
-060@Category({IOTests.class, 
LargeTests.class})
-061public class TestCacheConfig {
-062  private static final Log LOG = 
LogFactory.getLog(TestCacheConfig.class);
-063  private Configuration conf;
-064
-065  static class Deserializer implements 
CacheableDeserializer<Cacheable> {
-066private final Cacheable cacheable;
-067private int deserializedIdentifier = 
0;
-068
-069Deserializer(final Cacheable c) {
-070  deserializedIdentifier = 
CacheableDeserializerIdManager.registerDeserializer(this);
-071  this.cacheable = c;
-072}
-073
-074@Override
-075public int 
getDeserialiserIdentifier() {
-076  return deserializedIdentifier;
-077}
-078
-079@Override
-080public Cacheable deserialize(ByteBuff 
b, boolean reuse, MemoryType memType) throws IOException {
-081  LOG.info("Deserialized " + b + ", 
reuse=" + reuse);
-082  return cacheable;
-083}
-084
-085@Override
-086public Cacheable deserialize(ByteBuff 
b) throws IOException {
-087  LOG.info("Deserialized " + b);
-088  return cacheable;
-089}
-090  };
-091
-092  static class IndexCacheEntry extends 
DataCacheEntry {
-093private static IndexCacheEntry 
SINGLETON = new IndexCacheEntry();
-094
-095public IndexCacheEntry() {
-096  super(SINGLETON);
-097}
-098
-099@Override
-100public BlockType getBlockType() {
-101  return BlockType.ROOT_INDEX;
-102}
-103  }
-104
-105  static class DataCacheEntry implements 
Cacheable {
-106private static final int SIZE = 1;
-107private static DataCacheEntry 
SINGLETON = new DataCacheEntry();
-108final 
CacheableDeserializerCacheable deserializer;
-109
-110DataCacheEntry() {
-111  this(SINGLETON);
-112}
-113
-114DataCacheEntry(final Cacheable c) {
-115  this.deserializer = new 
Deserializer(c);
-116}
-117
-118@Override
-119public String toString() {
-120  return "size=" + SIZE + ", type=" + 
getBlockType();
-121};
-122
-123@Override
-124public long heapSize() {
-125  return SIZE;
-126}
-127
-128@Override
-129public int getSerializedLength() {
-130  return SIZE;
-131}
-132
-133@Override
-134public void serialize(ByteBuffer 
destination) {
-135  LOG.info("Serialized " + this + " 
to " + destination);
-136}
-137
-138@Override
-139public 
CacheableDeserializerCacheable getDeserializer() {
-140  return this.deserializer;
-141}
-142
-143

[13/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
index 9ec7269..8428049 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.html
@@ -331,118 +331,121 @@
 323// Loops each peer on each RS and 
dumps the queues
 324try {
 325  List<String> regionservers = 
queuesClient.getListOfReplicators();
-326  for (String regionserver : 
regionservers) {
-327ListString queueIds = 
queuesClient.getAllQueues(regionserver);
-328
replicationQueues.init(regionserver);
-329if 
(!liveRegionServers.contains(regionserver)) {
-330  
deadRegionServers.add(regionserver);
-331}
-332for (String queueId : queueIds) 
{
-333  ReplicationQueueInfo queueInfo 
= new ReplicationQueueInfo(queueId);
-334  ListString wals = 
queuesClient.getLogsInQueue(regionserver, queueId);
-335  if 
(!peerIds.contains(queueInfo.getPeerId())) {
-336
deletedQueues.add(regionserver + "/" + queueId);
-337
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, true,
-338  hdfs));
-339  } else {
-340
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, false,
+326  if (regionservers == null || 
regionservers.isEmpty()) {
+327return sb.toString();
+328  }
+329  for (String regionserver : 
regionservers) {
+330List<String> queueIds = 
queuesClient.getAllQueues(regionserver);
+331
replicationQueues.init(regionserver);
+332if 
(!liveRegionServers.contains(regionserver)) {
+333  
deadRegionServers.add(regionserver);
+334}
+335for (String queueId : queueIds) 
{
+336  ReplicationQueueInfo queueInfo 
= new ReplicationQueueInfo(queueId);
+337  ListString wals = 
queuesClient.getLogsInQueue(regionserver, queueId);
+338  if 
(!peerIds.contains(queueInfo.getPeerId())) {
+339
deletedQueues.add(regionserver + "/" + queueId);
+340
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, true,
 341  hdfs));
-342  }
-343}
-344  }
-345} catch (KeeperException ke) {
-346  throw new IOException(ke);
-347}
-348return sb.toString();
-349  }
-350
-351  private String formatQueue(String 
regionserver, ReplicationQueues replicationQueues, ReplicationQueueInfo 
queueInfo,
-352   String 
queueId, ListString wals, boolean isDeleted, boolean hdfs) throws 
Exception {
+342  } else {
+343
sb.append(formatQueue(regionserver, replicationQueues, queueInfo, queueId, 
wals, false,
+344  hdfs));
+345  }
+346}
+347  }
+348} catch (KeeperException ke) {
+349  throw new IOException(ke);
+350}
+351return sb.toString();
+352  }
 353
-354StringBuilder sb = new 
StringBuilder();
-355
-356List<String> deadServers;
-357
-358sb.append("Dumping replication queue 
info for RegionServer: [" + regionserver + "]" + "\n");
-359sb.append("Queue znode: " + 
queueId + "\n");
-360sb.append("PeerID: " + 
queueInfo.getPeerId() + "\n");
-361sb.append("Recovered: " + 
queueInfo.isQueueRecovered() + "\n");
-362deadServers = 
queueInfo.getDeadRegionServers();
-363if (deadServers.isEmpty()) {
-364  sb.append("No dead 
RegionServers found in this queue." + "\n");
-365} else {
-366  sb.append("Dead RegionServers: 
" + deadServers + "\n");
-367}
-368sb.append("Was deleted: " + 
isDeleted + "\n");
-369sb.append("Number of WALs in 
replication queue: " + wals.size() + "\n");
-370
peersQueueSize.addAndGet(queueInfo.getPeerId(), wals.size());
-371
-372for (String wal : wals) {
-373  long position = 
replicationQueues.getLogPosition(queueInfo.getPeerId(), wal);
-374  sb.append("Replication position 
for " + wal + ": " + (position > 0 ? position : "0"
-375  + " (not started or nothing to 
replicate)") + "\n");
-376}
-377
-378if (hdfs) {
-379  FileSystem fs = 
FileSystem.get(getConf());
-380  sb.append("Total size of WALs 
on HDFS for this queue: "
-381  + 
StringUtils.humanSize(getTotalWALSize(fs, wals, regionserver)) + "\n");
-382}
-383return sb.toString();
-384  }
-385  /**
-386   *  return total size in bytes from a 
list of WALs
-387   */
-388  private long 

[19/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/9e6e3360
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/9e6e3360
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/9e6e3360

Branch: refs/heads/asf-site
Commit: 9e6e3360a806c847f8cf0df364f6c5dd40674b4f
Parents: 95b5168
Author: jenkins 
Authored: Thu Jul 20 22:06:59 2017 +
Committer: jenkins 
Committed: Thu Jul 20 22:06:59 2017 +

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf| 8460 +-
 book.html   |2 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   |   54 +-
 coc.html|4 +-
 cygwin.html |4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/constant-values.html |6 +-
 devapidocs/index-all.html   |2 +
 .../hbase/classification/package-tree.html  |6 +-
 .../hadoop/hbase/client/package-tree.html   |   22 +-
 .../hadoop/hbase/filter/package-tree.html   |6 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html  |   12 +-
 .../hbase/io/hfile/class-use/BlockCache.html|2 +-
 .../hadoop/hbase/io/hfile/package-tree.html |4 +-
 .../hadoop/hbase/mapreduce/package-tree.html|4 +-
 ...onZKNodeCleaner.ReplicationQueueDeletor.html |   10 +-
 .../cleaner/ReplicationZKNodeCleaner.html   |6 +-
 .../hadoop/hbase/master/package-tree.html   |6 +-
 .../hbase/master/procedure/package-tree.html|2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |   12 +-
 .../hadoop/hbase/procedure2/package-tree.html   |4 +-
 .../hadoop/hbase/quotas/package-tree.html   |6 +-
 .../hadoop/hbase/regionserver/package-tree.html |   18 +-
 .../regionserver/querymatcher/package-tree.html |4 +-
 .../hbase/regionserver/wal/package-tree.html|2 +-
 ...DumpReplicationQueues.WarnOnlyAbortable.html |8 +-
 ...DumpReplicationQueues.WarnOnlyStoppable.html |8 +-
 .../regionserver/DumpReplicationQueues.html |4 +-
 .../replication/regionserver/package-tree.html  |2 +-
 .../hadoop/hbase/security/package-tree.html |2 +-
 .../hadoop/hbase/thrift/package-tree.html   |2 +-
 .../ByteBufferArray.BufferCreatorCallable.html  |   14 +-
 .../hbase/util/ByteBufferArray.Visitor.html |4 +-
 .../hadoop/hbase/util/ByteBufferArray.html  |   55 +-
 .../util/class-use/ByteBufferAllocator.html |2 +-
 .../apache/hadoop/hbase/util/package-tree.html  |   10 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |2 +-
 .../org/apache/hadoop/hbase/Version.html|6 +-
 .../hfile/CacheConfig.ExternalBlockCaches.html  |  265 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html  |  265 +-
 ...onZKNodeCleaner.ReplicationQueueDeletor.html |  249 +-
 .../cleaner/ReplicationZKNodeCleaner.html   |  249 +-
 .../replication/ReplicationPeersZKImpl.html |   45 +-
 .../ReplicationQueuesClientZKImpl.html  |2 +-
 .../DumpReplicationQueues.DumpOptions.html  |  223 +-
 ...DumpReplicationQueues.WarnOnlyAbortable.html |  223 +-
 ...DumpReplicationQueues.WarnOnlyStoppable.html |  223 +-
 .../regionserver/DumpReplicationQueues.html |  223 +-
 .../ByteBufferArray.BufferCreatorCallable.html  |  535 +-
 .../hbase/util/ByteBufferArray.Visitor.html |  535 +-
 .../hadoop/hbase/util/ByteBufferArray.html  |  535 +-
 export_control.html |4 +-
 hbase-annotations/checkstyle.html   |6 +-
 hbase-annotations/dependencies.html |6 +-
 hbase-annotations/dependency-convergence.html   |6 +-
 hbase-annotations/dependency-info.html  |6 +-
 hbase-annotations/dependency-management.html|6 +-
 hbase-annotations/index.html|6 +-
 hbase-annotations/integration.html  |6 +-
 hbase-annotations/issue-tracking.html   |6 +-
 hbase-annotations/license.html  |6 +-
 hbase-annotations/mail-lists.html   |6 +-
 hbase-annotations/plugin-management.html|6 +-
 hbase-annotations/plugins.html  |6 +-
 hbase-annotations/project-info.html |6 +-
 hbase-annotations/project-reports.html  |6 +-
 hbase-annotations/project-summary.html  |6 +-
 hbase-annotations/source-repository.html|6 +-
 

[02/19] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e6e3360/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
index c6abfe5..3734517 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
@@ -29,406 +29,420 @@
 021import static 
org.junit.Assert.assertEquals;
 022import static 
org.junit.Assert.assertFalse;
 023import static 
org.junit.Assert.assertTrue;
-024
-025import java.io.IOException;
-026import 
java.lang.management.ManagementFactory;
-027import 
java.lang.management.MemoryUsage;
-028import java.nio.ByteBuffer;
-029import java.util.Map;
-030
-031import org.apache.commons.logging.Log;
-032import 
org.apache.commons.logging.LogFactory;
-033import 
org.apache.hadoop.conf.Configuration;
-034import org.apache.hadoop.fs.FileSystem;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.HBaseConfiguration;
-037import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-038import 
org.apache.hadoop.hbase.HColumnDescriptor;
-039import 
org.apache.hadoop.hbase.HConstants;
-040import 
org.apache.hadoop.hbase.testclassification.IOTests;
-041import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-042import 
org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
-043import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-044import 
org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
-045import 
org.apache.hadoop.hbase.io.util.MemorySizeUtil;
-046import 
org.apache.hadoop.hbase.nio.ByteBuff;
-047import 
org.apache.hadoop.hbase.util.Threads;
-048import org.junit.After;
-049import org.junit.Before;
-050import org.junit.Test;
-051import 
org.junit.experimental.categories.Category;
-052
-053/**
-054 * Tests that {@link CacheConfig} does as 
expected.
-055 */
-056// This test is marked as a large test 
though it runs in a short amount of time
-057// (seconds).  It is large because it 
depends on being able to reset the global
-058// blockcache instance which is in a 
global variable.  Experience has it that
-059// tests clash on the global variable if 
this test is run as small sized test.
-060@Category({IOTests.class, 
LargeTests.class})
-061public class TestCacheConfig {
-062  private static final Log LOG = 
LogFactory.getLog(TestCacheConfig.class);
-063  private Configuration conf;
-064
-065  static class Deserializer implements 
CacheableDeserializerCacheable {
-066private final Cacheable cacheable;
-067private int deserializedIdentifier = 
0;
-068
-069Deserializer(final Cacheable c) {
-070  deserializedIdentifier = 
CacheableDeserializerIdManager.registerDeserializer(this);
-071  this.cacheable = c;
-072}
-073
-074@Override
-075public int 
getDeserialiserIdentifier() {
-076  return deserializedIdentifier;
-077}
-078
-079@Override
-080public Cacheable deserialize(ByteBuff 
b, boolean reuse, MemoryType memType) throws IOException {
-081  LOG.info("Deserialized " + b + ", 
reuse=" + reuse);
-082  return cacheable;
-083}
-084
-085@Override
-086public Cacheable deserialize(ByteBuff 
b) throws IOException {
-087  LOG.info("Deserialized " + b);
-088  return cacheable;
-089}
-090  };
-091
-092  static class IndexCacheEntry extends 
DataCacheEntry {
-093private static IndexCacheEntry 
SINGLETON = new IndexCacheEntry();
-094
-095public IndexCacheEntry() {
-096  super(SINGLETON);
-097}
-098
-099@Override
-100public BlockType getBlockType() {
-101  return BlockType.ROOT_INDEX;
-102}
-103  }
-104
-105  static class DataCacheEntry implements 
Cacheable {
-106private static final int SIZE = 1;
-107private static DataCacheEntry 
SINGLETON = new DataCacheEntry();
-108final 
CacheableDeserializerCacheable deserializer;
-109
-110DataCacheEntry() {
-111  this(SINGLETON);
-112}
-113
-114DataCacheEntry(final Cacheable c) {
-115  this.deserializer = new 
Deserializer(c);
-116}
-117
-118@Override
-119public String toString() {
-120  return "size=" + SIZE + ", type=" + 
getBlockType();
-121};
-122
-123@Override
-124public long heapSize() {
-125  return SIZE;
-126}
-127
-128@Override
-129public int getSerializedLength() {
-130  return SIZE;
-131}
-132
-133@Override
-134public void serialize(ByteBuffer 
destination) {
-135  LOG.info("Serialized " + this + " 
to " + destination);
-136}
-137
-138@Override
-139public 
CacheableDeserializerCacheable getDeserializer() {
-140  return this.deserializer;
-141}
-142
-143@Override
-144public BlockType getBlockType() {
-145  return 

hbase git commit: HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes.

2017-07-20 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 ffb702cd4 -> 0940714c4


HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic 
when configuring hbase.bucketcache.bucket.sizes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0940714c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0940714c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0940714c

Branch: refs/heads/branch-1.4
Commit: 0940714c488346ef113bb75bfb81ae509b3093ff
Parents: ffb702c
Author: anoopsamjohn 
Authored: Thu Jul 20 23:01:34 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Jul 20 23:01:34 2017 +0530

--
 hbase-common/src/main/resources/hbase-default.xml  |  2 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  | 17 +
 .../hadoop/hbase/io/hfile/TestCacheConfig.java | 14 ++
 3 files changed, 28 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0940714c/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index 3c4044d..a29e9c8 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -848,7 +848,7 @@ possible configurations would overwhelm and obscure the 
important.
 A comma-separated list of sizes for buckets for the 
bucketcache.
 Can be multiple sizes. List block sizes in order from smallest to largest.
 The sizes you use will depend on your data access patterns.
-Must be a multiple of 1024 else you will run into
+Must be a multiple of 256 else you will run into
 'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
 If you specify no values here, then you pick up the default bucketsizes set
 in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0940714c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 340236b..57d2057 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -551,7 +551,8 @@ public class CacheConfig {
* @return Returns L2 block cache instance (for now it is BucketCache 
BlockCache all the time)
* or null if not supposed to be a L2.
*/
-  private static BlockCache getL2(final Configuration c) {
+  @VisibleForTesting
+  static BlockCache getL2(final Configuration c) {
 final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, 
EXTERNAL_BLOCKCACHE_DEFAULT);
 if (LOG.isDebugEnabled()) {
   LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " 
l2 cache");
@@ -561,10 +562,8 @@ public class CacheConfig {
 if (useExternal) {
   return getExternalBlockcache(c);
 }
-
 // otherwise use the bucket cache.
 return getBucketCache(c);
-
   }
 
   private static BlockCache getExternalBlockcache(Configuration c) {
@@ -618,7 +617,17 @@ public class CacheConfig {
 if (configuredBucketSizes != null) {
   bucketSizes = new int[configuredBucketSizes.length];
   for (int i = 0; i < configuredBucketSizes.length; i++) {
-bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
+int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());
+if (bucketSize % 256 != 0) {
+  // We need all the bucket sizes to be multiples of 256. Having all 
the configured bucket
+  // sizes to be multiples of 256 will ensure that the block offsets 
within buckets,
+  // that are calculated, will also be multiples of 256.
+  // See BucketEntry where offset to each block is represented using 5 
bytes (instead of 8
+  // bytes long). We would like to save heap overhead as less as 
possible.
+  throw new IllegalArgumentException("Illegal value: " + bucketSize + 
" configured for '"
+  + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be 
multiples of 256");
+}
+bucketSizes[i] = bucketSize;
   }
 }
 BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0940714c/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
--
diff 

hbase git commit: HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes.

2017-07-20 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/branch-1 cfd5b6b59 -> 9036556a3


HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic 
when configuring hbase.bucketcache.bucket.sizes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9036556a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9036556a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9036556a

Branch: refs/heads/branch-1
Commit: 9036556a33c356225814c3ca50ecc09997269ea5
Parents: cfd5b6b
Author: anoopsamjohn 
Authored: Thu Jul 20 23:00:48 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Jul 20 23:00:48 2017 +0530

--
 hbase-common/src/main/resources/hbase-default.xml  |  2 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  | 17 +
 .../hadoop/hbase/io/hfile/TestCacheConfig.java | 14 ++
 3 files changed, 28 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index 3c4044d..a29e9c8 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -848,7 +848,7 @@ possible configurations would overwhelm and obscure the 
important.
 A comma-separated list of sizes for buckets for the 
bucketcache.
 Can be multiple sizes. List block sizes in order from smallest to largest.
 The sizes you use will depend on your data access patterns.
-Must be a multiple of 1024 else you will run into
+Must be a multiple of 256 else you will run into
 'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
 If you specify no values here, then you pick up the default bucketsizes set
 in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 340236b..57d2057 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -551,7 +551,8 @@ public class CacheConfig {
* @return Returns L2 block cache instance (for now it is BucketCache 
BlockCache all the time)
* or null if not supposed to be a L2.
*/
-  private static BlockCache getL2(final Configuration c) {
+  @VisibleForTesting
+  static BlockCache getL2(final Configuration c) {
 final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, 
EXTERNAL_BLOCKCACHE_DEFAULT);
 if (LOG.isDebugEnabled()) {
   LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " 
l2 cache");
@@ -561,10 +562,8 @@ public class CacheConfig {
 if (useExternal) {
   return getExternalBlockcache(c);
 }
-
 // otherwise use the bucket cache.
 return getBucketCache(c);
-
   }
 
   private static BlockCache getExternalBlockcache(Configuration c) {
@@ -618,7 +617,17 @@ public class CacheConfig {
 if (configuredBucketSizes != null) {
   bucketSizes = new int[configuredBucketSizes.length];
   for (int i = 0; i < configuredBucketSizes.length; i++) {
-bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
+int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());
+if (bucketSize % 256 != 0) {
+  // We need all the bucket sizes to be multiples of 256. Having all 
the configured bucket
+  // sizes to be multiples of 256 will ensure that the block offsets 
within buckets,
+  // that are calculated, will also be multiples of 256.
+  // See BucketEntry where offset to each block is represented using 5 
bytes (instead of 8
+  // bytes long). We would like to save heap overhead as less as 
possible.
+  throw new IllegalArgumentException("Illegal value: " + bucketSize + 
" configured for '"
+  + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be 
multiples of 256");
+}
+bucketSizes[i] = bucketSize;
   }
 }
 BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
--
diff --git 

hbase git commit: HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes.

2017-07-20 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e095d3964 -> 8606cda35


HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic 
when configuring hbase.bucketcache.bucket.sizes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8606cda3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8606cda3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8606cda3

Branch: refs/heads/branch-2
Commit: 8606cda3577542f1ce56de7af3671a61a12322a9
Parents: e095d39
Author: anoopsamjohn 
Authored: Thu Jul 20 22:59:52 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Jul 20 22:59:52 2017 +0530

--
 hbase-common/src/main/resources/hbase-default.xml  |  2 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  | 17 +
 .../hadoop/hbase/io/hfile/TestCacheConfig.java | 14 ++
 3 files changed, 28 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8606cda3/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index b10effd..1f8c2bf 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -905,7 +905,7 @@ possible configurations would overwhelm and obscure the 
important.
 A comma-separated list of sizes for buckets for the 
bucketcache.
 Can be multiple sizes. List block sizes in order from smallest to largest.
 The sizes you use will depend on your data access patterns.
-Must be a multiple of 1024 else you will run into
+Must be a multiple of 256 else you will run into
 'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
 If you specify no values here, then you pick up the default bucketsizes set
 in code (See BucketAllocator#DEFAULT_BUCKET_SIZES).

http://git-wip-us.apache.org/repos/asf/hbase/blob/8606cda3/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index d514003..9fa0483 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -584,7 +584,8 @@ public class CacheConfig {
* @return Returns L2 block cache instance (for now it is BucketCache 
BlockCache all the time)
* or null if not supposed to be a L2.
*/
-  private static BlockCache getL2(final Configuration c) {
+  @VisibleForTesting
+  static BlockCache getL2(final Configuration c) {
 final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, 
EXTERNAL_BLOCKCACHE_DEFAULT);
 if (LOG.isDebugEnabled()) {
   LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " 
l2 cache");
@@ -594,10 +595,8 @@ public class CacheConfig {
 if (useExternal) {
   return getExternalBlockcache(c);
 }
-
 // otherwise use the bucket cache.
 return getBucketCache(c);
-
   }
 
   private static BlockCache getExternalBlockcache(Configuration c) {
@@ -651,7 +650,17 @@ public class CacheConfig {
 if (configuredBucketSizes != null) {
   bucketSizes = new int[configuredBucketSizes.length];
   for (int i = 0; i < configuredBucketSizes.length; i++) {
-bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
+int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());
+if (bucketSize % 256 != 0) {
+  // We need all the bucket sizes to be multiples of 256. Having all 
the configured bucket
+  // sizes to be multiples of 256 will ensure that the block offsets 
within buckets,
+  // that are calculated, will also be multiples of 256.
+  // See BucketEntry where offset to each block is represented using 5 
bytes (instead of 8
+  // bytes long). We would like to save heap overhead as less as 
possible.
+  throw new IllegalArgumentException("Illegal value: " + bucketSize + 
" configured for '"
+  + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be 
multiples of 256");
+}
+bucketSizes[i] = bucketSize;
   }
 }
 BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/8606cda3/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
--
diff --git 

hbase git commit: HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes.

2017-07-20 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/master 0c49185c3 -> bc93b6610


HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic 
when configuring hbase.bucketcache.bucket.sizes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bc93b661
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bc93b661
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bc93b661

Branch: refs/heads/master
Commit: bc93b6610b349d38502290af27da0ae0b5fd4936
Parents: 0c49185
Author: anoopsamjohn 
Authored: Thu Jul 20 22:59:06 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Jul 20 22:59:06 2017 +0530

--
 hbase-common/src/main/resources/hbase-default.xml  |  2 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  | 17 +
 .../hadoop/hbase/io/hfile/TestCacheConfig.java | 14 ++
 3 files changed, 28 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bc93b661/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index 27a833f..c4148a1 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -905,7 +905,7 @@ possible configurations would overwhelm and obscure the 
important.
 A comma-separated list of sizes for buckets for the 
bucketcache.
 Can be multiple sizes. List block sizes in order from smallest to largest.
 The sizes you use will depend on your data access patterns.
-Must be a multiple of 1024 else you will run into
+Must be a multiple of 256 else you will run into
 'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
 If you specify no values here, then you pick up the default bucketsizes set
 in code (See BucketAllocator#DEFAULT_BUCKET_SIZES).

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc93b661/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index d514003..9fa0483 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -584,7 +584,8 @@ public class CacheConfig {
* @return Returns L2 block cache instance (for now it is BucketCache 
BlockCache all the time)
* or null if not supposed to be a L2.
*/
-  private static BlockCache getL2(final Configuration c) {
+  @VisibleForTesting
+  static BlockCache getL2(final Configuration c) {
 final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, 
EXTERNAL_BLOCKCACHE_DEFAULT);
 if (LOG.isDebugEnabled()) {
   LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " 
l2 cache");
@@ -594,10 +595,8 @@ public class CacheConfig {
 if (useExternal) {
   return getExternalBlockcache(c);
 }
-
 // otherwise use the bucket cache.
 return getBucketCache(c);
-
   }
 
   private static BlockCache getExternalBlockcache(Configuration c) {
@@ -651,7 +650,17 @@ public class CacheConfig {
 if (configuredBucketSizes != null) {
   bucketSizes = new int[configuredBucketSizes.length];
   for (int i = 0; i < configuredBucketSizes.length; i++) {
-bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
+int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());
+if (bucketSize % 256 != 0) {
+  // We need all the bucket sizes to be multiples of 256. Having all 
the configured bucket
+  // sizes to be multiples of 256 will ensure that the block offsets 
within buckets,
+  // that are calculated, will also be multiples of 256.
+  // See BucketEntry where offset to each block is represented using 5 
bytes (instead of 8
+  // bytes long). We would like to save heap overhead as less as 
possible.
+  throw new IllegalArgumentException("Illegal value: " + bucketSize + 
" configured for '"
+  + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be 
multiples of 256");
+}
+bucketSizes[i] = bucketSize;
   }
 }
 BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc93b661/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
--
diff --git 

hbase git commit: HBASE-17738 BucketCache startup is slow - addendum (Ram)

2017-07-20 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-2 9300fbc9c -> e095d3964


HBASE-17738 BucketCache startup is slow - addendum (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e095d396
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e095d396
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e095d396

Branch: refs/heads/branch-2
Commit: e095d3964b6f8cb67d15f8fe2d7924cbc7c801ca
Parents: 9300fbc
Author: Ramkrishna 
Authored: Thu Jul 20 22:38:13 2017 +0530
Committer: Ramkrishna 
Committed: Thu Jul 20 22:40:30 2017 +0530

--
 .../hadoop/hbase/util/ByteBufferArray.java  | 17 ++---
 .../hadoop/hbase/util/TestByteBufferArray.java  | 39 
 2 files changed, 51 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e095d396/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index 60f8c79..068afe2 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -44,14 +44,15 @@ import com.google.common.annotations.VisibleForTesting;
  * reading/writing data from this large buffer with a position and offset
  */
 @InterfaceAudience.Private
-public final class ByteBufferArray {
+public class ByteBufferArray {
   private static final Log LOG = LogFactory.getLog(ByteBufferArray.class);
 
   public static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
   @VisibleForTesting
   ByteBuffer buffers[];
   private int bufferSize;
-  private int bufferCount;
+  @VisibleForTesting
+  int bufferCount;
 
   /**
* We allocate a number of byte buffers as the capacity. In order not to out
@@ -75,12 +76,13 @@ public final class ByteBufferArray {
 createBuffers(directByteBuffer, allocator);
   }
 
-  private void createBuffers(boolean directByteBuffer, ByteBufferAllocator 
allocator)
+  @VisibleForTesting
+  void createBuffers(boolean directByteBuffer, ByteBufferAllocator allocator)
   throws IOException {
-int threadCount = Runtime.getRuntime().availableProcessors();
+int threadCount = getThreadCount();
 ExecutorService service = new ThreadPoolExecutor(threadCount, threadCount, 
0L,
 TimeUnit.MILLISECONDS, new LinkedBlockingQueue());
-int perThreadCount = Math.round((float) (bufferCount) / threadCount);
+int perThreadCount = (int)Math.floor((double) (bufferCount) / threadCount);
 int lastThreadCount = bufferCount - (perThreadCount * (threadCount - 1));
 Future[] futures = new Future[threadCount];
 try {
@@ -109,6 +111,11 @@ public final class ByteBufferArray {
 this.buffers[bufferCount] = ByteBuffer.allocate(0);
   }
 
+  @VisibleForTesting
+  int getThreadCount() {
+return Runtime.getRuntime().availableProcessors();
+  }
+
   /**
* A callable that creates buffers of the specified length either 
onheap/offheap using the
* {@link ByteBufferAllocator}

http://git-wip-us.apache.org/repos/asf/hbase/blob/e095d396/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
index c71b86c..7077643 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
@@ -79,4 +79,43 @@ public class TestByteBufferArray {
   }
 }
   }
+
+  @Test
+  public void testByteBufferCreation1() throws Exception {
+ByteBufferAllocator allocator = new ByteBufferAllocator() {
+  @Override
+  public ByteBuffer allocate(long size, boolean directByteBuffer) throws 
IOException {
+if (directByteBuffer) {
+  return ByteBuffer.allocateDirect((int) size);
+} else {
+  return ByteBuffer.allocate((int) size);
+}
+  }
+};
+ByteBufferArray array = new DummyByteBufferArray(7 * 1024 * 1024, false, 
allocator);
+// overwrite
+array.bufferCount = 25;
+array.buffers = new ByteBuffer[array.bufferCount + 1];
+array.createBuffers(true, allocator);
+for (int i = 0; i < array.buffers.length; i++) {
+  if (i == array.buffers.length - 1) {
+assertEquals(array.buffers[i].capacity(), 0);
+  

hbase git commit: HBASE-17738 BucketCache startup is slow - addendum (Ram)

2017-07-20 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/master 01db60d65 -> 0c49185c3


HBASE-17738 BucketCache startup is slow - addendum (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0c49185c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0c49185c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0c49185c

Branch: refs/heads/master
Commit: 0c49185c3e95cc91ba6455a404ca4e89f2c2fc20
Parents: 01db60d
Author: Ramkrishna 
Authored: Thu Jul 20 22:38:13 2017 +0530
Committer: Ramkrishna 
Committed: Thu Jul 20 22:38:13 2017 +0530

--
 .../hadoop/hbase/util/ByteBufferArray.java  | 17 ++---
 .../hadoop/hbase/util/TestByteBufferArray.java  | 39 
 2 files changed, 51 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0c49185c/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index 60f8c79..068afe2 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -44,14 +44,15 @@ import com.google.common.annotations.VisibleForTesting;
  * reading/writing data from this large buffer with a position and offset
  */
 @InterfaceAudience.Private
-public final class ByteBufferArray {
+public class ByteBufferArray {
   private static final Log LOG = LogFactory.getLog(ByteBufferArray.class);
 
   public static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
   @VisibleForTesting
   ByteBuffer buffers[];
   private int bufferSize;
-  private int bufferCount;
+  @VisibleForTesting
+  int bufferCount;
 
   /**
* We allocate a number of byte buffers as the capacity. In order not to out
@@ -75,12 +76,13 @@ public final class ByteBufferArray {
 createBuffers(directByteBuffer, allocator);
   }
 
-  private void createBuffers(boolean directByteBuffer, ByteBufferAllocator 
allocator)
+  @VisibleForTesting
+  void createBuffers(boolean directByteBuffer, ByteBufferAllocator allocator)
   throws IOException {
-int threadCount = Runtime.getRuntime().availableProcessors();
+int threadCount = getThreadCount();
 ExecutorService service = new ThreadPoolExecutor(threadCount, threadCount, 
0L,
 TimeUnit.MILLISECONDS, new LinkedBlockingQueue());
-int perThreadCount = Math.round((float) (bufferCount) / threadCount);
+int perThreadCount = (int)Math.floor((double) (bufferCount) / threadCount);
 int lastThreadCount = bufferCount - (perThreadCount * (threadCount - 1));
 Future[] futures = new Future[threadCount];
 try {
@@ -109,6 +111,11 @@ public final class ByteBufferArray {
 this.buffers[bufferCount] = ByteBuffer.allocate(0);
   }
 
+  @VisibleForTesting
+  int getThreadCount() {
+return Runtime.getRuntime().availableProcessors();
+  }
+
   /**
* A callable that creates buffers of the specified length either 
onheap/offheap using the
* {@link ByteBufferAllocator}

http://git-wip-us.apache.org/repos/asf/hbase/blob/0c49185c/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
index c71b86c..7077643 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
@@ -79,4 +79,43 @@ public class TestByteBufferArray {
   }
 }
   }
+
+  @Test
+  public void testByteBufferCreation1() throws Exception {
+ByteBufferAllocator allocator = new ByteBufferAllocator() {
+  @Override
+  public ByteBuffer allocate(long size, boolean directByteBuffer) throws 
IOException {
+if (directByteBuffer) {
+  return ByteBuffer.allocateDirect((int) size);
+} else {
+  return ByteBuffer.allocate((int) size);
+}
+  }
+};
+ByteBufferArray array = new DummyByteBufferArray(7 * 1024 * 1024, false, 
allocator);
+// overwrite
+array.bufferCount = 25;
+array.buffers = new ByteBuffer[array.bufferCount + 1];
+array.createBuffers(true, allocator);
+for (int i = 0; i < array.buffers.length; i++) {
+  if (i == array.buffers.length - 1) {
+assertEquals(array.buffers[i].capacity(), 0);
+  } 

[4/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/af5af7ee
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/af5af7ee
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/af5af7ee

Branch: refs/heads/branch-1.1-HBASE-18147
Commit: af5af7ee34a2cdc4580f0a401225d908df1b392f
Parents: b03a5e7
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 08:56:13 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/af5af7ee/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..6686e8b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[2/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c852591f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c852591f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c852591f

Branch: refs/heads/branch-1-HBASE-18147
Commit: c852591fc259c798fea69a634d50ac98bd2dff45
Parents: cfd5b6b
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 08:56:03 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c852591f/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..6686e8b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm -rf 

[1/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks. [Forced Update!]

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 3f0992349 -> 37141d5f1 (forced update)
  refs/heads/branch-1-HBASE-18147 1b565836f -> c852591fc (forced update)
  refs/heads/branch-1.1-HBASE-18147 c8263c72a -> af5af7ee3 (forced update)
  refs/heads/branch-1.2-HBASE-18147 5d7999a4d -> 45573cb68 (forced update)


HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/37141d5f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/37141d5f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/37141d5f

Branch: refs/heads/HBASE-18147
Commit: 37141d5f1ad4196b1c47843ee2d763c9978e7d45
Parents: 01db60d
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 08:55:47 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/37141d5f/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..6686e8b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo 

[3/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45573cb6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45573cb6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45573cb6

Branch: refs/heads/branch-1.2-HBASE-18147
Commit: 45573cb683eaf1fdbd57512bdbd5384adb78b7a2
Parents: 7d2175e
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 08:56:08 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/45573cb6/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..6686e8b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[5/5] hbase git commit: HBASE-16312 update jquery version

2017-07-20 Thread stack
HBASE-16312 update jquery version

Upgrade jquery from 1.8.3 to 3.2.1 in hbase-server and hbase-thrift modules

Change-Id: I92d479e9802d954f607ba409077bc98581e9e5ca

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9300fbc9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9300fbc9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9300fbc9

Branch: refs/heads/branch-2
Commit: 9300fbc9c15d97dde01f48a090d7d81188b8b86f
Parents: cbf3904
Author: Peter Somogyi 
Authored: Mon Jul 10 13:27:12 2017 +0200
Committer: Michael Stack 
Committed: Thu Jul 20 10:08:12 2017 +0100

--
 hbase-rest/src/main/resources/hbase-webapps/rest/rest.jsp  | 2 --
 .../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon | 2 --
 .../apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon   | 2 --
 .../src/main/resources/hbase-webapps/master/procedures.jsp | 2 --
 .../src/main/resources/hbase-webapps/master/processMaster.jsp  | 2 --
 .../src/main/resources/hbase-webapps/master/processRS.jsp  | 2 --
 .../src/main/resources/hbase-webapps/master/snapshot.jsp   | 2 --
 .../src/main/resources/hbase-webapps/master/snapshotsStats.jsp | 2 --
 hbase-server/src/main/resources/hbase-webapps/master/table.jsp | 2 --
 .../src/main/resources/hbase-webapps/master/tablesDetailed.jsp | 2 --
 hbase-server/src/main/resources/hbase-webapps/master/zk.jsp| 2 --
 .../main/resources/hbase-webapps/regionserver/processRS.jsp| 2 --
 .../src/main/resources/hbase-webapps/regionserver/region.jsp   | 2 --
 .../main/resources/hbase-webapps/regionserver/storeFile.jsp| 2 --
 .../src/main/resources/hbase-webapps/static/js/jquery.min.js   | 6 --
 .../src/main/resources/hbase-webapps/thrift/thrift.jsp | 2 --
 .../src/main/resources/hbase-webapps/static/js/jquery.min.js   | 6 --
 .../src/main/resources/hbase-webapps/thrift/thrift.jsp | 2 --
 18 files changed, 8 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-rest/src/main/resources/hbase-webapps/rest/rest.jsp
--
diff --git a/hbase-rest/src/main/resources/hbase-webapps/rest/rest.jsp 
b/hbase-rest/src/main/resources/hbase-webapps/rest/rest.jsp
index e002749..ba3c027 100644
--- a/hbase-rest/src/main/resources/hbase-webapps/rest/rest.jsp
+++ b/hbase-rest/src/main/resources/hbase-webapps/rest/rest.jsp
@@ -27,9 +27,7 @@ Configuration conf = 
(Configuration)getServletContext().getAttribute("hbase.conf
 long startcode = conf.getLong("startcode", System.currentTimeMillis());
 String listenPort = conf.get("hbase.rest.port", "8080");
 %>
-
 
 
   

http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 708e72d..bf3b623 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -93,9 +93,7 @@ AssignmentManager assignmentManager = 
master.getAssignmentManager();
   }
 
 
-
 
 
   

http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
index 7219c0a..61795e0 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
@@ -50,9 +50,7 @@ org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
 : masterAddressTracker.getMasterAddress();
   int infoPort = masterAddressTracker == null ? 0 : 
masterAddressTracker.getMasterInfoPort();
 
-
 
 
   

http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
--
diff --git 
a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
index 00d8ead..29c3d45 100644
--- 

[1/5] hbase git commit: HBASE-16312 update jquery version

2017-07-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 cbf390422 -> 9300fbc9c


http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-thrift/src/main/resources/hbase-webapps/thrift/thrift.jsp
--
diff --git a/hbase-thrift/src/main/resources/hbase-webapps/thrift/thrift.jsp 
b/hbase-thrift/src/main/resources/hbase-webapps/thrift/thrift.jsp
index 97b948f..579d0f7 100644
--- a/hbase-thrift/src/main/resources/hbase-webapps/thrift/thrift.jsp
+++ b/hbase-thrift/src/main/resources/hbase-webapps/thrift/thrift.jsp
@@ -33,9 +33,7 @@ String implType = 
conf.get("hbase.regionserver.thrift.server.type", "threadpool"
 String compact = conf.get("hbase.regionserver.thrift.compact", "false");
 String framed = conf.get("hbase.regionserver.thrift.framed", "false");
 %>
-
 
 
   



[3/5] hbase git commit: HBASE-16312 update jquery version

2017-07-20 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-server/src/main/resources/hbase-webapps/thrift/thrift.jsp
--
diff --git a/hbase-server/src/main/resources/hbase-webapps/thrift/thrift.jsp 
b/hbase-server/src/main/resources/hbase-webapps/thrift/thrift.jsp
index fa98f05..7f545ad 100644
--- a/hbase-server/src/main/resources/hbase-webapps/thrift/thrift.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/thrift/thrift.jsp
@@ -33,9 +33,7 @@ String implType = 
conf.get("hbase.regionserver.thrift.server.type", "threadpool"
 String compact = conf.get("hbase.regionserver.thrift.compact", "false");
 String framed = conf.get("hbase.regionserver.thrift.framed", "false");
 %>
-
 
 
   



[2/5] hbase git commit: HBASE-16312 update jquery version

2017-07-20 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-thrift/src/main/resources/hbase-webapps/static/js/jquery.min.js
--
diff --git 
a/hbase-thrift/src/main/resources/hbase-webapps/static/js/jquery.min.js 
b/hbase-thrift/src/main/resources/hbase-webapps/static/js/jquery.min.js
index 3883779..644d35e 100644
--- a/hbase-thrift/src/main/resources/hbase-webapps/static/js/jquery.min.js
+++ b/hbase-thrift/src/main/resources/hbase-webapps/static/js/jquery.min.js
@@ -1,2 +1,4 @@
-/*! jQuery v1.8.3 jquery.com | jquery.org/license */
-(function(e,t){function _(e){var t=M[e]={};return 
v.each(e.split(y),function(e,n){t[n]=!0}),t}function 
H(e,n,r){if(r===t&===1){var 
i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof 
r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else
 r=t}return r}function B(e){var t;for(t in 
e){if(t==="data"&(e[t]))continue;if(t!=="toJSON")return!1}return!0}function
 et(){return!1}function tt(){return!0}function 
ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do 
e=e[t];while(e&!==1);return e}function 
ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var 
i=!!t.call(e,r,e);return i===n});if(t.nodeType)return 
v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var 
r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return 
v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(
 e,t)>=0===n})}function lt(e){var 
t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return
 n}function Lt(e,t){return 
e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function
 At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var 
n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete 
o.handle,o.events={};for(n in 
u)for(r=0,i=u[n].length;r").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.
 
createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write(""),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return
 Wt[e]=n,n}function fn(e,t,n,r){var 
i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof 
i=="object"?t:"")+"]",i,n,r)});else if(!n&(t)==="object")for(i in 
t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return 
function(t,n){typeof t!="string"&&(n=t,t="*");var 

[4/5] hbase git commit: HBASE-16312 update jquery version

2017-07-20 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/9300fbc9/hbase-server/src/main/resources/hbase-webapps/static/js/jquery.min.js
--
diff --git 
a/hbase-server/src/main/resources/hbase-webapps/static/js/jquery.min.js 
b/hbase-server/src/main/resources/hbase-webapps/static/js/jquery.min.js
index 3883779..644d35e 100644
--- a/hbase-server/src/main/resources/hbase-webapps/static/js/jquery.min.js
+++ b/hbase-server/src/main/resources/hbase-webapps/static/js/jquery.min.js
@@ -1,2 +1,4 @@
-/*! jQuery v1.8.3 jquery.com | jquery.org/license */
-(function(e,t){function _(e){var t=M[e]={};return 
v.each(e.split(y),function(e,n){t[n]=!0}),t}function 
H(e,n,r){if(r===t&===1){var 
i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof 
r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else
 r=t}return r}function B(e){var t;for(t in 
e){if(t==="data"&(e[t]))continue;if(t!=="toJSON")return!1}return!0}function
 et(){return!1}function tt(){return!0}function 
ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do 
e=e[t];while(e&!==1);return e}function 
ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var 
i=!!t.call(e,r,e);return i===n});if(t.nodeType)return 
v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var 
r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return 
v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(
 e,t)>=0===n})}function lt(e){var 
t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return
 n}function Lt(e,t){return 
e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function
 At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var 
n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete 
o.handle,o.events={};for(n in 
u)for(r=0,i=u[n].length;r").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.
 
createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write(""),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return
 Wt[e]=n,n}function fn(e,t,n,r){var 
i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof 
i=="object"?t:"")+"]",i,n,r)});else if(!n&(t)==="object")for(i in 
t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return 
function(t,n){typeof t!="string"&&(n=t,t="*");var 

[4/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c8263c72
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c8263c72
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c8263c72

Branch: refs/heads/branch-1.1-HBASE-18147
Commit: c8263c72a0c1b58802cf48efbf3bb258420b502e
Parents: b03a5e7
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:37:59 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c8263c72/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..744531b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[2/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1b565836
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1b565836
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1b565836

Branch: refs/heads/branch-1-HBASE-18147
Commit: 1b565836fdf8566fbc417f5400b641addf544b31
Parents: cfd5b6b
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:37:47 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1b565836/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..744531b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm -rf 

[1/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks. [Forced Update!]

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 82ab758d7 -> 3f0992349 (forced update)
  refs/heads/branch-1-HBASE-18147 63d7f647a -> 1b565836f (forced update)
  refs/heads/branch-1.1-HBASE-18147 1d790d79d -> c8263c72a (forced update)
  refs/heads/branch-1.2-HBASE-18147 c85b088dc -> 5d7999a4d (forced update)


HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3f099234
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3f099234
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3f099234

Branch: refs/heads/HBASE-18147
Commit: 3f09923491adc542a94620090395d61a8bb343b6
Parents: 01db60d
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:37:34 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3f099234/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..744531b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo 

[3/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5d7999a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5d7999a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5d7999a4

Branch: refs/heads/branch-1.2-HBASE-18147
Commit: 5d7999a4d7dda763d719eaa523c9278bee7670d8
Parents: 7d2175e
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:37:54 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5d7999a4/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..744531b
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[3/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c85b088d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c85b088d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c85b088d

Branch: refs/heads/branch-1.2-HBASE-18147
Commit: c85b088dc01ba5504b974128694a2a3cfe5544b5
Parents: 7d2175e
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:35:36 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c85b088d/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..03ffd6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[4/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1d790d79
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1d790d79
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1d790d79

Branch: refs/heads/branch-1.1-HBASE-18147
Commit: 1d790d79d7f809993ce9c5731ba778ddf0c65d84
Parents: b03a5e7
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:35:43 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1d790d79/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..03ffd6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[2/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/63d7f647
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/63d7f647
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/63d7f647

Branch: refs/heads/branch-1-HBASE-18147
Commit: 63d7f647a4f627318cb925fadc7dc11fdf23178b
Parents: cfd5b6b
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:35:29 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/63d7f647/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..03ffd6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm -rf 

[1/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks. [Forced Update!]

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 ee5c82a2c -> 82ab758d7 (forced update)
  refs/heads/branch-1-HBASE-18147 aff5726f1 -> 63d7f647a (forced update)
  refs/heads/branch-1.1-HBASE-18147 37eeaa630 -> 1d790d79d (forced update)
  refs/heads/branch-1.2-HBASE-18147 551b57fe0 -> c85b088dc (forced update)


HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/82ab758d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/82ab758d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/82ab758d

Branch: refs/heads/HBASE-18147
Commit: 82ab758d70d347cc6a8dec7f0c3259faff0468b7
Parents: 01db60d
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:35:15 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/82ab758d/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..03ffd6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo 

[4/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/37eeaa63
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/37eeaa63
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/37eeaa63

Branch: refs/heads/branch-1.1-HBASE-18147
Commit: 37eeaa6304d8dc9ce0beff0aa58358ef05e0db4e
Parents: b03a5e7
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:27:58 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/37eeaa63/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..5a6cb73
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[2/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/aff5726f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/aff5726f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/aff5726f

Branch: refs/heads/branch-1-HBASE-18147
Commit: aff5726f100bfbad320028b336b400ba028bacb5
Parents: cfd5b6b
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:27:43 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/aff5726f/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..5a6cb73
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm -rf 

[1/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks. [Forced Update!]

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 4faa5b073 -> ee5c82a2c (forced update)
  refs/heads/branch-1-HBASE-18147 ac0543f4a -> aff5726f1 (forced update)
  refs/heads/branch-1.1-HBASE-18147 a1ebfa810 -> 37eeaa630 (forced update)
  refs/heads/branch-1.2-HBASE-18147 cac24bfa9 -> 551b57fe0 (forced update)


HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ee5c82a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ee5c82a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ee5c82a2

Branch: refs/heads/HBASE-18147
Commit: ee5c82a2c96c29b1a68133d69b41fa1294cf243f
Parents: 01db60d
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:26:57 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ee5c82a2/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..5a6cb73
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo 

[3/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/551b57fe
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/551b57fe
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/551b57fe

Branch: refs/heads/branch-1.2-HBASE-18147
Commit: 551b57fe00dd1fbeaaf74a1b74be6cf09787fd3e
Parents: 7d2175e
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 02:27:51 2017 -0500

--
 dev-support/Jenkinsfile| 254 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 +++
 3 files changed, 369 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/551b57fe/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..5a6cb73
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,254 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[2/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ac0543f4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ac0543f4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ac0543f4

Branch: refs/heads/branch-1-HBASE-18147
Commit: ac0543f4aab4815baafcd3cda676e281365c1534
Parents: cfd5b6b
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 01:03:13 2017 -0500

--
 dev-support/Jenkinsfile| 269 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 ++
 3 files changed, 384 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ac0543f4/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..d4aed6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,269 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm -rf 

[3/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cac24bfa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cac24bfa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cac24bfa

Branch: refs/heads/branch-1.2-HBASE-18147
Commit: cac24bfa9ff47eaeaf4c4091ec5199bad0a54f29
Parents: 7d2175e
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 01:03:20 2017 -0500

--
 dev-support/Jenkinsfile| 269 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 ++
 3 files changed, 384 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cac24bfa/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..d4aed6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,269 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm 

[1/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks. [Forced Update!]

2017-07-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 7164c6fef -> 4faa5b073 (forced update)
  refs/heads/branch-1-HBASE-18147 4a2c0c38f -> ac0543f4a (forced update)
  refs/heads/branch-1.1-HBASE-18147 a60afbad9 -> a1ebfa810 (forced update)
  refs/heads/branch-1.2-HBASE-18147 ae1e6d26b -> cac24bfa9 (forced update)


HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4faa5b07
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4faa5b07
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4faa5b07

Branch: refs/heads/HBASE-18147
Commit: 4faa5b07333914cb33541b13078b0cd3839e3c4b
Parents: 01db60d
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 01:01:59 2017 -0500

--
 dev-support/Jenkinsfile| 269 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 ++
 3 files changed, 384 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4faa5b07/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..d4aed6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,269 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo 

[4/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

2017-07-20 Thread busbey
HBASE-18147 POC jenkinsfile for nightly checks.

* Jenkinsfile that works for all current branches.
* adds dev-support script for setting parameters for our yetus nightly 
invocation
* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a1ebfa81
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a1ebfa81
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a1ebfa81

Branch: refs/heads/branch-1.1-HBASE-18147
Commit: a1ebfa810828ae0f432ec0d81d9f1a245d69860d
Parents: b03a5e7
Author: Sean Busbey 
Authored: Thu Jul 20 01:01:59 2017 -0500
Committer: Sean Busbey 
Committed: Thu Jul 20 01:03:26 2017 -0500

--
 dev-support/Jenkinsfile| 269 
 dev-support/docker/Dockerfile  |  29 
 dev-support/hbase_nightly_yetus.sh |  86 ++
 3 files changed, 384 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a1ebfa81/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 000..d4aed6f
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,269 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+node {
+  label 'Hadoop'
+}
+  }
+  triggers {
+cron('@daily')
+  }
+  options {
+buildDiscarder(logRotator(numToKeepStr: '30'))
+timeout (time: 6, unit: 'HOURS')
+timestamps()
+  }
+  environment {
+TOOLS = "${env.WORKSPACE}/tools"
+// where we check out to across stages
+BASEDIR = "${env.WORKSPACE}/component"
+YETUS_RELEASE = '0.5.0'
+// where we'll write everything from different steps.
+OUTPUT_RELATIVE_GENERAL = 'output-general'
+OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"
+OUTPUT_RELATIVE_JDK7 = 'output-jdk7'
+OUTPUTDIR_JDK7 = "${env.WORKSPACE}/output-jdk7"
+OUTPUT_RELATIVE_JDK8 = 'output-jdk8'
+OUTPUTDIR_JDK8 = "${env.WORKSPACE}/output-jdk8"
+PROJECT = 'hbase'
+PROJET_PERSONALITY = 
'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+// This section of the docs tells folks not to use the javadoc tag. older 
branches have our old version of the check for said tag.
+AUTHOR_IGNORE_LIST = 
'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+// output from surefire; sadly the archive function in yetus only works on 
file names.
+ARCHIVE_PATTERN_LIST = 
'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, 
remove from here so that new problems will cause a failure.
+TESTS_FILTER = 
'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+EXCLUDE_TESTS_URL = 
'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+  }
+  parameters {
+booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, 
description: '''Check to use the current HEAD of apache/yetus rather than our 
configured release.
+
+Should only be used manually when e.g. there is some non-work-aroundable 
issue in yetus we are checking a fix for.''')
+booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
+  }
+  stages {
+stage ('yetus install') {
+  steps {
+sh  '''#!/usr/bin/env bash
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+rm