[4/7] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread elserj
HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b2eb09aa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b2eb09aa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b2eb09aa

Branch: refs/heads/branch-1.4
Commit: b2eb09aa2369fb88a1b4d8c22678453933e7c3db
Parents: e0f5f3a
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 15:25:40 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 11 +
 3 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b2eb09aa/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index d3433c7..46f0490 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
       noClusterCleanUp = true;
     }
     monkeyProps = new Properties();
+    // Add entries for the CM from hbase-site.xml as a convenience.
+    // Do this prior to loading from the properties file to make sure those in the properties
+    // file are given precedence to those in hbase-site.xml (backwards compatibility).
+    loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
     if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
       String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
       if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+    for (Entry<String, String> entry : conf) {
+      for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+        if (entry.getKey().startsWith(prefix)) {
+          props.put(entry.getKey(), entry.getValue());
+          break;
+        }
+      }
+    }
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
     processBaseOptions(cmd);
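
For reference, a minimal, self-contained sketch of the precedence rule described in the comment above: values matching a ChaosMonkey prefix are read from hbase-site.xml first, and a -monkeyProps file can then overwrite them. The prefix set and the property name below are illustrative only; the real prefixes live in MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES, which this patch adds.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;

public class MonkeyPropsPrecedenceSketch {
  // Hypothetical prefixes, standing in for MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES.
  private static final Set<String> PREFIXES = new HashSet<>(Arrays.asList("sdm.", "move."));

  // Mirrors loadMonkeyProperties(): copy only keys that look like ChaosMonkey settings.
  static void loadFromSite(Properties props, Configuration conf) {
    for (Entry<String, String> entry : conf) {
      for (String prefix : PREFIXES) {
        if (entry.getKey().startsWith(prefix)) {
          props.put(entry.getKey(), entry.getValue());
          break;
        }
      }
    }
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("sdm.action1.period", "60000");              // pretend this came from hbase-site.xml

    Properties monkeyProps = new Properties();
    loadFromSite(monkeyProps, conf);                       // site values are loaded first...

    Properties fromFile = new Properties();
    fromFile.setProperty("sdm.action1.period", "90000");   // ...then the -monkeyProps file
    monkeyProps.putAll(fromFile);                          // overwrites them (backwards compatible)

    System.out.println(monkeyProps.getProperty("sdm.action1.period")); // prints 90000
  }
}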

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2eb09aa/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import or

[2/7] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread elserj
HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/95e1fa30
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/95e1fa30
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/95e1fa30

Branch: refs/heads/branch-2
Commit: 95e1fa30b086eb709f2ab57ef7781537fde1533c
Parents: bb2b6b8
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 14:56:17 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 11 +
 3 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/95e1fa30/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index d3433c7..46f0490 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
       noClusterCleanUp = true;
     }
     monkeyProps = new Properties();
+    // Add entries for the CM from hbase-site.xml as a convenience.
+    // Do this prior to loading from the properties file to make sure those in the properties
+    // file are given precedence to those in hbase-site.xml (backwards compatibility).
+    loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
     if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
       String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
       if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+    for (Entry<String, String> entry : conf) {
+      for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+        if (entry.getKey().startsWith(prefix)) {
+          props.put(entry.getKey(), entry.getValue());
+          break;
+        }
+      }
+    }
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
     processBaseOptions(cmd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/95e1fa30/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.

[7/7] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread elserj
HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e391bb04
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e391bb04
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e391bb04

Branch: refs/heads/branch-1.1
Commit: e391bb047ed9416b9592ac4c87195ed52f7cbc22
Parents: a566f33
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 16:01:10 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 10 
 3 files changed, 79 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e391bb04/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index 3929524..4c9aa3b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
       noClusterCleanUp = true;
     }
     monkeyProps = new Properties();
+    // Add entries for the CM from hbase-site.xml as a convenience.
+    // Do this prior to loading from the properties file to make sure those in the properties
+    // file are given precedence to those in hbase-site.xml (backwards compatibility).
+    loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
     if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
       String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
      if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+    for (Entry<String, String> entry : conf) {
+      for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+        if (entry.getKey().startsWith(prefix)) {
+          props.put(entry.getKey(), entry.getValue());
+          break;
+        }
+      }
+    }
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
     processBaseOptions(cmd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/e391bb04/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org

[5/7] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread elserj
HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d1bd8835
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d1bd8835
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d1bd8835

Branch: refs/heads/branch-1.3
Commit: d1bd883593803bbc5f017d97f0959568dd1ae3aa
Parents: 92a2dfc
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 15:47:29 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 11 +
 3 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d1bd8835/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index d3433c7..46f0490 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
       noClusterCleanUp = true;
     }
     monkeyProps = new Properties();
+    // Add entries for the CM from hbase-site.xml as a convenience.
+    // Do this prior to loading from the properties file to make sure those in the properties
+    // file are given precedence to those in hbase-site.xml (backwards compatibility).
+    loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
     if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
       String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
       if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+    for (Entry<String, String> entry : conf) {
+      for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+        if (entry.getKey().startsWith(prefix)) {
+          props.put(entry.getKey(), entry.getValue());
+          break;
+        }
+      }
+    }
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
     processBaseOptions(cmd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/d1bd8835/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import or

[1/7] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread elserj
Repository: hbase
Updated Branches:
  refs/heads/branch-1 7a9668264 -> 00a4e0697
  refs/heads/branch-1.1 a566f3352 -> e391bb047
  refs/heads/branch-1.2 658b01e26 -> 5bf83cc14
  refs/heads/branch-1.3 92a2dfcd6 -> d1bd88359
  refs/heads/branch-1.4 e0f5f3aa1 -> b2eb09aa2
  refs/heads/branch-2 bb2b6b866 -> 95e1fa30b
  refs/heads/master b932d38b2 -> 13028d715


HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/13028d71
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/13028d71
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/13028d71

Branch: refs/heads/master
Commit: 13028d71576eebda3f5a831903e2a2e6d5f97ff4
Parents: b932d38
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 14:51:32 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 11 +
 3 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/13028d71/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index d3433c7..46f0490 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
       noClusterCleanUp = true;
     }
     monkeyProps = new Properties();
+    // Add entries for the CM from hbase-site.xml as a convenience.
+    // Do this prior to loading from the properties file to make sure those in the properties
+    // file are given precedence to those in hbase-site.xml (backwards compatibility).
+    loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
     if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
       String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
       if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+    for (Entry<String, String> entry : conf) {
+      for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+        if (entry.getKey().startsWith(prefix)) {
+          props.put(entry.getKey(), entry.getValue());
+          break;
+        }
+      }
+    }
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
     processBaseOptions(cmd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/13028d71/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WI

[3/7] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread elserj
HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/00a4e069
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/00a4e069
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/00a4e069

Branch: refs/heads/branch-1
Commit: 00a4e0697a464192c817a0de89e99017e65d3b44
Parents: 7a96682
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 15:07:35 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 11 +
 3 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/00a4e069/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index d3433c7..46f0490 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
       noClusterCleanUp = true;
     }
     monkeyProps = new Properties();
+    // Add entries for the CM from hbase-site.xml as a convenience.
+    // Do this prior to loading from the properties file to make sure those in the properties
+    // file are given precedence to those in hbase-site.xml (backwards compatibility).
+    loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
     if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
       String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
       if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+    for (Entry<String, String> entry : conf) {
+      for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+        if (entry.getKey().startsWith(prefix)) {
+          props.put(entry.getKey(), entry.getValue());
+          break;
+        }
+      }
+    }
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
     processBaseOptions(cmd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/00a4e069/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.

[6/7] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread elserj
HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5bf83cc1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5bf83cc1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5bf83cc1

Branch: refs/heads/branch-1.2
Commit: 5bf83cc14ee53bded53bba5e940cff81f82d15c3
Parents: 658b01e
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 15:56:07 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 11 +
 3 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5bf83cc1/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index 3929524..4c9aa3b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
       noClusterCleanUp = true;
     }
     monkeyProps = new Properties();
+    // Add entries for the CM from hbase-site.xml as a convenience.
+    // Do this prior to loading from the properties file to make sure those in the properties
+    // file are given precedence to those in hbase-site.xml (backwards compatibility).
+    loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
     if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
       String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
       if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+    for (Entry<String, String> entry : conf) {
+      for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+        if (entry.getKey().startsWith(prefix)) {
+          props.put(entry.getKey(), entry.getValue());
+          break;
+        }
+      }
+    }
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
     processBaseOptions(cmd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/5bf83cc1/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import or

[39/50] [abbrv] hbase git commit: HBASE-18608 AsyncConnection should return AsyncAdmin interface instead of the implemenation

2017-08-20 Thread busbey
HBASE-18608 AsyncConnection should return AsyncAdmin interface instead of the 
implemenation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/092dc6de
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/092dc6de
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/092dc6de

Branch: refs/heads/HBASE-18467
Commit: 092dc6de8483eea1b4e0d960cf22e65359379da1
Parents: a17ed03
Author: Guanghao Zhang 
Authored: Wed Aug 16 18:00:53 2017 +0800
Committer: Guanghao Zhang 
Committed: Thu Aug 17 09:47:39 2017 +0800

--
 .../hadoop/hbase/client/AsyncAdminBuilder.java  | 16 
 .../hadoop/hbase/client/AsyncAdminBuilderBase.java  | 12 ++--
 .../apache/hadoop/hbase/client/AsyncConnection.java |  4 ++--
 .../hadoop/hbase/client/AsyncConnectionImpl.java| 12 ++--
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  2 +-
 .../hadoop/hbase/client/TestAsyncAdminBuilder.java  |  6 +++---
 6 files changed, 26 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/092dc6de/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
index d706949..fb0aefd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
@@ -29,7 +29,7 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
  * create a new AsyncAdmin instance.
  */
 @InterfaceAudience.Public
-public interface AsyncAdminBuilder<T extends AsyncAdmin> {
+public interface AsyncAdminBuilder {
 
   /**
* Set timeout for a whole admin operation. Operation timeout and max 
attempt times(or max retry
@@ -39,7 +39,7 @@ public interface AsyncAdminBuilder {
* @param unit
* @return this for invocation chaining
*/
-  AsyncAdminBuilder<T> setOperationTimeout(long timeout, TimeUnit unit);
+  AsyncAdminBuilder setOperationTimeout(long timeout, TimeUnit unit);
 
   /**
* Set timeout for each rpc request.
@@ -47,7 +47,7 @@ public interface AsyncAdminBuilder {
* @param unit
* @return this for invocation chaining
*/
-  AsyncAdminBuilder<T> setRpcTimeout(long timeout, TimeUnit unit);
+  AsyncAdminBuilder setRpcTimeout(long timeout, TimeUnit unit);
 
   /**
* Set the base pause time for retrying. We use an exponential policy to 
generate sleep time when
@@ -56,7 +56,7 @@ public interface AsyncAdminBuilder {
* @param unit
* @return this for invocation chaining
*/
-  AsyncAdminBuilder<T> setRetryPause(long timeout, TimeUnit unit);
+  AsyncAdminBuilder setRetryPause(long timeout, TimeUnit unit);
 
   /**
* Set the max retry times for an admin operation. Usually it is the max 
attempt times minus 1.
@@ -65,7 +65,7 @@ public interface AsyncAdminBuilder {
* @param maxRetries
* @return this for invocation chaining
*/
-  default AsyncAdminBuilder<T> setMaxRetries(int maxRetries) {
+  default AsyncAdminBuilder setMaxRetries(int maxRetries) {
 return setMaxAttempts(retries2Attempts(maxRetries));
   }
 
@@ -76,18 +76,18 @@ public interface AsyncAdminBuilder {
* @param maxAttempts
* @return this for invocation chaining
*/
-  AsyncAdminBuilder<T> setMaxAttempts(int maxAttempts);
+  AsyncAdminBuilder setMaxAttempts(int maxAttempts);
 
   /**
* Set the number of retries that are allowed before we start to log.
* @param startLogErrorsCnt
* @return this for invocation chaining
*/
-  AsyncAdminBuilder<T> setStartLogErrorsCnt(int startLogErrorsCnt);
+  AsyncAdminBuilder setStartLogErrorsCnt(int startLogErrorsCnt);
 
   /**
* Create a {@link AsyncAdmin} instance.
* @return a {@link AsyncAdmin} instance
*/
-  T build();
+  AsyncAdmin build();
 }
\ No newline at end of file
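
For orientation, a hedged usage sketch of the builder after this change: it is no longer parameterized and always yields the AsyncAdmin interface. The builder method names match the diff above; obtaining the builder through AsyncConnection.getAdminBuilder() and the listTableNames() call are assumptions based on the HBase 2.0 async client API rather than part of this patch.

import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AsyncAdminBuilderSketch {
  public static void main(String[] args) throws Exception {
    try (AsyncConnection conn =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
      AsyncAdmin admin = conn.getAdminBuilder()          // builder is no longer generic
          .setOperationTimeout(30, TimeUnit.SECONDS)
          .setRpcTimeout(10, TimeUnit.SECONDS)
          .setMaxRetries(5)
          .build();                                      // now returns the AsyncAdmin interface
      admin.listTableNames()
          .thenAccept(names -> names.forEach(System.out::println))
          .join();
    }
  }
}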

http://git-wip-us.apache.org/repos/asf/hbase/blob/092dc6de/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilderBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilderBase.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilderBase.java
index 013e8d7..77ff88d 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilderBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminBuilderBase.java
@@ -25,7 +25,7 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
  * Base class for all asynchronous admin builders.
  */
 @InterfaceAudience.Private
-abstract class AsyncAdm

[09/50] [abbrv] hbase git commit: HBASE-14135 Merge backup images (Vladimir Rodionov)

2017-08-20 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/05e6e569/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
deleted file mode 100644
index ba1b65e..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.backup.mapreduce;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.HFileInputFormat;
-import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
-import org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer;
-import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-
-/**
- * A tool to split HFiles into new region boundaries as a MapReduce job. The 
tool generates HFiles
- * for later bulk importing.
- */
-@InterfaceAudience.Private
-public class HFileSplitterJob extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(HFileSplitterJob.class);
-  final static String NAME = "HFileSplitterJob";
-  public final static String BULK_OUTPUT_CONF_KEY = "hfile.bulk.output";
-  public final static String TABLES_KEY = "hfile.input.tables";
-  public final static String TABLE_MAP_KEY = "hfile.input.tablesmap";
-  private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
-
-  public HFileSplitterJob() {
-  }
-
-  protected HFileSplitterJob(final Configuration c) {
-super(c);
-  }
-
-  /**
-   * A mapper that just writes out cells. This one can be used together with
-   * {@link KeyValueSortReducer}
-   */
-  static class HFileCellMapper extends
-      Mapper<NullWritable, KeyValue, ImmutableBytesWritable, KeyValue> {
-
-    @Override
-    public void map(NullWritable key, KeyValue value, Context context) throws IOException,
-        InterruptedException {
-      // Convert value to KeyValue if subclass
-      if (!value.getClass().equals(KeyValue.class)) {
-        value =
-            new KeyValue(value.getRowArray(), value.getRowOffset(), value.getRowLength(),
-                value.getFamilyArray(), value.getFamilyOffset(), value.getFamilyLength(),
-                value.getQualifierArray(), value.getQualifierOffset(), value.getQualifierLength(),
-                value.getTimestamp(), Type.codeToType(value.getTypeByte()), value.getValueArray(),
-                value.getValueOffset(), value.getValueLength());
-      }
-      context.write(new ImmutableBytesWritable(CellUtil.cloneRow(value)), value);
-    }
-
-    @Override
-    public void setup(Context context) throws IOException {
-      // do nothing
-    }
-  }
-
-  /**
-   * Sets up the actual job.
-   * @param args The command line parameters.
-   * @return The newly created job.
-   * @throws IOException When setting up the job fails.
-   */
-  public Job createS

[25/50] [abbrv] hbase git commit: HBASE-18544 Move the HRegion#addRegionToMETA to TestDefaultMemStore

2017-08-20 Thread busbey
HBASE-18544 Move the HRegion#addRegionToMETA to TestDefaultMemStore

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/310934d0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/310934d0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/310934d0

Branch: refs/heads/HBASE-18467
Commit: 310934d0604605fe361e836fe4277c48b5c493fa
Parents: 63e313b
Author: Chun-Hao Tang 
Authored: Wed Aug 16 00:43:02 2017 +0800
Committer: Michael Stack 
Committed: Tue Aug 15 14:52:33 2017 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  | 31 ++--
 .../hbase/regionserver/TestDefaultMemStore.java | 28 +-
 2 files changed, 29 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/310934d0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 3b24f3d..b9cafd9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3928,7 +3928,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
* We throw RegionTooBusyException if above memstore limit
* and expect client to retry using some kind of backoff
   */
-  private void checkResources() throws RegionTooBusyException {
+  void checkResources() throws RegionTooBusyException {
 // If catalog region, do not impose resource constraints or block updates.
 if (this.getRegionInfo().isMetaRegion()) return;
 
@@ -3974,7 +3974,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
* @param edits Cell updates by column
* @throws IOException
*/
-  private void put(final byte [] row, byte [] family, List edits)
+  void put(final byte [] row, byte [] family, List edits)
   throws IOException {
 NavigableMap> familyMap;
 familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
@@ -6878,33 +6878,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   }
 
   /**
-   * Inserts a new region's meta information into the passed
-   * meta region. Used by the HMaster bootstrap code adding
-   * new table to hbase:meta table.
-   *
-   * @param meta hbase:meta HRegion to be updated
-   * @param r HRegion to add to meta
-   *
-   * @throws IOException
-   */
-  // TODO remove since only test and merge use this
-  public static void addRegionToMETA(final HRegion meta, final HRegion r) 
throws IOException {
-meta.checkResources();
-// The row key is the region name
-byte[] row = r.getRegionInfo().getRegionName();
-final long now = EnvironmentEdgeManager.currentTime();
-final List cells = new ArrayList<>(2);
-cells.add(new KeyValue(row, HConstants.CATALOG_FAMILY,
-  HConstants.REGIONINFO_QUALIFIER, now,
-  r.getRegionInfo().toByteArray()));
-// Set into the root table the version of the meta table.
-cells.add(new KeyValue(row, HConstants.CATALOG_FAMILY,
-  HConstants.META_VERSION_QUALIFIER, now,
-  Bytes.toBytes(HConstants.META_VERSION)));
-meta.put(row, HConstants.CATALOG_FAMILY, cells);
-  }
-
-  /**
* Computes the Path of the HRegion
*
* @param tabledir qualified path for table

http://git-wip-us.apache.org/repos/asf/hbase/blob/310934d0/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index 0b1638b..7b10846 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
@@ -975,7 +975,7 @@ public class TestDefaultMemStore {
 HRegion r =
 HRegion.createHRegion(hri, testDir, conf, desc,
 wFactory.getWAL(hri.getEncodedNameAsBytes(), 
hri.getTable().getNamespace()));
-HRegion.addRegionToMETA(meta, r);
+addRegionToMETA(meta, r);
 edge.setCurrentTimeMillis(1234 + 100);
 StringBuffer sb = new StringBuffer();
 assertTrue(meta.shouldFlush(sb) == false);
@@ -983,6 +983,32 @@ public class TestDefaultMemStore {
 assertTrue(meta.shouldFlush(sb) == true);
   }
 
+  /**
+   * Inserts a new region's meta information into the passed
+   * meta region. Used by t

[28/50] [abbrv] hbase git commit: HBASE-18424 Fix TestAsyncTableGetMultiThreaded

2017-08-20 Thread busbey
HBASE-18424 Fix TestAsyncTableGetMultiThreaded

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/665fd0d0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/665fd0d0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/665fd0d0

Branch: refs/heads/HBASE-18467
Commit: 665fd0d07e34141c2765f02398eb1ad9e376f32f
Parents: 5280c10
Author: Vladimir Rodionov 
Authored: Wed Aug 16 11:29:34 2017 +0800
Committer: zhangduo 
Committed: Wed Aug 16 11:29:34 2017 +0800

--
 .../hadoop/hbase/client/TestAsyncTableGetMultiThreaded.java  | 8 +---
 1 file changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/665fd0d0/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableGetMultiThreaded.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableGetMultiThreaded.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableGetMultiThreaded.java
index 2abc54d..225060b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableGetMultiThreaded.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableGetMultiThreaded.java
@@ -37,7 +37,11 @@ import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.MemoryCompactionPolicy;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ByteBufferPool;
 import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -47,14 +51,12 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 /**
  * Will split the table, and move region randomly when testing.
  */
-@Ignore // Can't move hbase:meta off master server in AMv2. TODO.
 @Category({ LargeTests.class, ClientTests.class })
 public class TestAsyncTableGetMultiThreaded {
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();



[02/50] [abbrv] hbase git commit: HBASE-18551 [AMv2] UnassignProcedure and crashed regionservers; AMENDMENT -- disable TestAM#testSocketTimeout... mock is insufficent for new processing

2017-08-20 Thread busbey
HBASE-18551 [AMv2] UnassignProcedure and crashed regionservers; AMENDMENT -- 
disable TestAM#testSocketTimeout... mock is insufficent for new processing


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1070888f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1070888f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1070888f

Branch: refs/heads/HBASE-18467
Commit: 1070888fff3a89d435018f11bfb2fd5609be8bab
Parents: 71a9a9a
Author: Michael Stack 
Authored: Fri Aug 11 14:20:06 2017 -0700
Committer: Michael Stack 
Committed: Fri Aug 11 14:20:35 2017 -0700

--
 .../hadoop/hbase/master/assignment/TestAssignmentManager.java | 7 +++
 1 file changed, 3 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1070888f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
index d18c12a..4d2a894 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
@@ -243,7 +243,7 @@ public class TestAssignmentManager {
 }
   }
 
-  @Test
+  @Ignore @Test // Disabled for now. Since HBASE-18551, this mock is 
insufficient.
   public void testSocketTimeout() throws Exception {
 final TableName tableName = TableName.valueOf(this.name.getMethodName());
 final HRegionInfo hri = createRegionInfo(tableName, 1);
@@ -254,9 +254,8 @@ public class TestAssignmentManager {
 rsDispatcher.setMockRsExecutor(new SocketTimeoutRsExecutor(20, 3));
 waitOnFuture(submitProcedure(am.createAssignProcedure(hri, false)));
 
-rsDispatcher.setMockRsExecutor(new SocketTimeoutRsExecutor(20, 3));
-
-exception.expect(ServerCrashException.class);
+rsDispatcher.setMockRsExecutor(new SocketTimeoutRsExecutor(20, 1));
+// exception.expect(ServerCrashException.class);
 waitOnFuture(submitProcedure(am.createUnassignProcedure(hri, null, 
false)));
 
 assertEquals(assignSubmittedCount + 1, 
assignProcMetrics.getSubmittedCounter().getCount());



[23/50] [abbrv] hbase git commit: HBASE-18599 Add missing @Deprecated annotations

2017-08-20 Thread busbey
HBASE-18599 Add missing @Deprecated annotations

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70c4f78c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70c4f78c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70c4f78c

Branch: refs/heads/HBASE-18467
Commit: 70c4f78ce03cf9e13d148e75445b19d43571a09a
Parents: effd109
Author: Lars Francke 
Authored: Tue Aug 15 09:36:51 2017 +0200
Committer: Michael Stack 
Committed: Tue Aug 15 10:44:50 2017 -0700

--
 .../apache/hadoop/hbase/HColumnDescriptor.java  | 17 +--
 .../apache/hadoop/hbase/HTableDescriptor.java   |  8 +++--
 .../org/apache/hadoop/hbase/client/Admin.java   | 32 +++-
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  5 ++-
 .../client/metrics/ServerSideScanMetrics.java   | 15 +++--
 .../hbase/coprocessor/RegionObserver.java   |  5 ++-
 6 files changed, 65 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/70c4f78c/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 5fe85cc..507bf49 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -100,14 +100,18 @@ public class HColumnDescriptor implements ColumnFamilyDescriptor, Comparable<HColumnDescriptor> {
-   * @deprecated use {@link ColumnFamilyDescriptorBuilder#of(String)}
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * (<a href="https://issues.apache.org/jira/browse/HBASE-18433">HBASE-18433</a>).
+   * Use {@link ColumnFamilyDescriptorBuilder#of(String)}.
*/
+  @Deprecated
   public HColumnDescriptor(final String familyName) {
 this(Bytes.toBytes(familyName));
   }
@@ -118,8 +122,11 @@ public class HColumnDescriptor implements ColumnFamilyDescriptor, Comparable<HColumnDescriptor> {
-   * @deprecated use {@link ColumnFamilyDescriptorBuilder#of(byte[])}
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * (<a href="https://issues.apache.org/jira/browse/HBASE-18433">HBASE-18433</a>).
+   * Use {@link ColumnFamilyDescriptorBuilder#of(byte[])}.
*/
+  @Deprecated
   public HColumnDescriptor(final byte [] familyName) {
 this(new ModifyableColumnFamilyDescriptor(familyName));
   }
@@ -128,9 +135,13 @@ public class HColumnDescriptor implements ColumnFamilyDescriptor, Comparable<HColumnDescriptor> {
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * (<a href="https://issues.apache.org/jira/browse/HBASE-18433">HBASE-18433</a>).
+   * Use {@link ColumnFamilyDescriptorBuilder#copy(ColumnFamilyDescriptor)}.
    */
+  @Deprecated
   public HColumnDescriptor(HColumnDescriptor desc) {
 this(desc, true);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c4f78c/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index c09d434..a0f23c1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -44,7 +44,7 @@ import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableCo
  * if the table is read only, the maximum size of the memstore,
  * when the region split should occur, coprocessors associated with it etc...
  * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
- * use {@link TableDescriptorBuilder} to build {@link HTableDescriptor}.
+ * Use {@link TableDescriptorBuilder} to build {@link HTableDescriptor}.
  */
 @Deprecated
 @InterfaceAudience.Public
@@ -602,9 +602,13 @@ public class HTableDescriptor implements TableDescriptor, Comparable<HTableDescriptor> {
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * (<a href="https://issues.apache.org/jira/browse/HBASE-18008">HBASE-18008</a>).
+   * Use {@link #getColumnFamilyNames()}.
*/
+  @Deprecated
   public Set<byte[]> getFamiliesKeys() {
 return delegatee.getColumnFamilyNames();
   }
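
The deprecation notes above point at the builder-based replacements. A short sketch of those calls follows; the column family and table names are illustrative, and the builder method names reflect the client API of roughly this era (newBuilder/addColumnFamily), not something introduced by this patch.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class DescriptorBuilderSketch {
  public static void main(String[] args) {
    // Replacement for new HColumnDescriptor("info"):
    ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("info"))
        .setMaxVersions(3)
        .build();

    // Replacement for new HTableDescriptor(...); getColumnFamilyNames() replaces getFamiliesKeys():
    TableDescriptor table = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("example"))
        .addColumnFamily(cf)
        .build();
    table.getColumnFamilyNames().forEach(name -> System.out.println(Bytes.toString(name)));
  }
}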

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c4f78c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index d2acae3..8de9f89 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin

[12/50] [abbrv] hbase git commit: HBASE-18528 DON'T allow user to modify the passed table/column descriptor

2017-08-20 Thread busbey
HBASE-18528 DON'T allow user to modify the passed table/column descriptor


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e2b797be
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e2b797be
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e2b797be

Branch: refs/heads/HBASE-18467
Commit: e2b797be390f05c55a490a64bc72e2d8c19fcbb7
Parents: c6bf4d5
Author: Chia-Ping Tsai 
Authored: Mon Aug 14 14:02:30 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Aug 14 14:02:30 2017 +0800

--
 .../client/ImmutableHColumnDescriptor.java  |  5 +-
 .../hbase/client/ImmutableHTableDescriptor.java | 11 ++-
 .../hbase/coprocessor/MasterObserver.java   | 48 ++--
 .../hbase/master/MasterCoprocessorHost.java | 80 +++-
 4 files changed, 79 insertions(+), 65 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e2b797be/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
index c8d34ff..89ef851 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
@@ -35,8 +35,9 @@ public class ImmutableHColumnDescriptor extends 
HColumnDescriptor {
 super(desc, false);
   }
 
-  ImmutableHColumnDescriptor(final ModifyableColumnFamilyDescriptor desc) {
-super(desc);
+  public ImmutableHColumnDescriptor(final ColumnFamilyDescriptor desc) {
+super(desc instanceof ModifyableColumnFamilyDescriptor ?
+  (ModifyableColumnFamilyDescriptor) desc : new 
ModifyableColumnFamilyDescriptor(desc));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/e2b797be/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
index 4e9e9af..169f143 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
@@ -28,19 +28,17 @@ import org.apache.hadoop.hbase.HTableDescriptor;
  * Read-only table descriptor.
  */
 @Deprecated // deprecated for hbase 2.0, remove for hbase 3.0. see 
HTableDescriptor.
-@InterfaceAudience.Public
+@InterfaceAudience.Private
 public class ImmutableHTableDescriptor extends HTableDescriptor {
 
   @Override
   protected HColumnDescriptor toHColumnDescriptor(ColumnFamilyDescriptor desc) 
{
 if (desc == null) {
   return null;
-} else if (desc instanceof ModifyableColumnFamilyDescriptor) {
-  return new ImmutableHColumnDescriptor((ModifyableColumnFamilyDescriptor) 
desc);
 } else if (desc instanceof HColumnDescriptor) {
   return new ImmutableHColumnDescriptor((HColumnDescriptor) desc);
 } else {
-  return new ImmutableHColumnDescriptor(new 
ModifyableColumnFamilyDescriptor(desc));
+  return new ImmutableHColumnDescriptor(desc);
 }
   }
   /*
@@ -51,6 +49,11 @@ public class ImmutableHTableDescriptor extends 
HTableDescriptor {
 super(desc, false);
   }
 
+  public ImmutableHTableDescriptor(final TableDescriptor desc) {
+super(desc instanceof ModifyableTableDescriptor ?
+  (ModifyableTableDescriptor) desc : new 
ModifyableTableDescriptor(desc.getTableName(), desc));
+  }
+
   @Override
   protected ModifyableTableDescriptor getDelegateeForModification() {
 throw new UnsupportedOperationException("HTableDescriptor is read-only");

http://git-wip-us.apache.org/repos/asf/hbase/blob/e2b797be/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index f4f5db3..8e368ba 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -107,7 +107,7 @@ public interface MasterObserver extends Coprocessor {
* table handler and it is async to the create RPC call.
* It can't bypass the default action, e.g., ctx.bypass() won't 

[26/50] [abbrv] hbase git commit: HBASE-18581 Removed dead code and some tidy up work in BaseLoadBalancer

2017-08-20 Thread busbey
HBASE-18581 Removed dead code and some tidy up work in BaseLoadBalancer

  * calls to methods getLowestLocalityRegionServer() & 
getLeastLoadedTopServerForRegion() got removed in HBASE-18164
  * call to calculateRegionServerLocalities() got removed in HBASE-15486
  * Some other minor improvements

Change-Id: Ib149530d8d20c019b0891c026e23180e260f59db
Signed-off-by: Apekshit Sharma 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2b88edfd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2b88edfd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2b88edfd

Branch: refs/heads/HBASE-18467
Commit: 2b88edfd8d6c1cb512abf1d9f3316c50ed342cfc
Parents: 310934d
Author: Umesh Agashe 
Authored: Fri Aug 11 11:18:13 2017 -0700
Committer: Apekshit Sharma 
Committed: Tue Aug 15 14:55:52 2017 -0700

--
 .../hbase/master/balancer/BaseLoadBalancer.java | 190 ---
 1 file changed, 32 insertions(+), 158 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2b88edfd/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index 8f5b6f5..30f59a9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -1,4 +1,4 @@
- /**
+ /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -34,6 +34,7 @@ import java.util.Random;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.function.Predicate;
+import java.util.stream.Collectors;
 
 import org.apache.commons.lang.NotImplementedException;
 import org.apache.commons.logging.Log;
@@ -360,10 +361,10 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
   }
 
   numMaxRegionsPerTable = new int[numTables];
-  for (int serverIndex = 0 ; serverIndex < 
numRegionsPerServerPerTable.length; serverIndex++) {
-for (tableIndex = 0 ; tableIndex < 
numRegionsPerServerPerTable[serverIndex].length; tableIndex++) {
-  if (numRegionsPerServerPerTable[serverIndex][tableIndex] > 
numMaxRegionsPerTable[tableIndex]) {
-numMaxRegionsPerTable[tableIndex] = 
numRegionsPerServerPerTable[serverIndex][tableIndex];
+  for (int[] aNumRegionsPerServerPerTable : numRegionsPerServerPerTable) {
+for (tableIndex = 0; tableIndex < aNumRegionsPerServerPerTable.length; 
tableIndex++) {
+  if (aNumRegionsPerServerPerTable[tableIndex] > 
numMaxRegionsPerTable[tableIndex]) {
+numMaxRegionsPerTable[tableIndex] = 
aNumRegionsPerServerPerTable[tableIndex];
   }
 }
   }
@@ -375,10 +376,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 } else {
   hasRegionReplicas = true;
   HRegionInfo primaryInfo = 
RegionReplicaUtil.getRegionInfoForDefaultReplica(info);
-  regionIndexToPrimaryIndex[i] =
-  regionsToIndex.containsKey(primaryInfo) ?
-  regionsToIndex.get(primaryInfo):
-  -1;
+  regionIndexToPrimaryIndex[i] = 
regionsToIndex.getOrDefault(primaryInfo, -1);
 }
   }
 
@@ -608,7 +606,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 
 /** An action to move or swap a region */
 public static class Action {
-  public static enum Type {
+  public enum Type {
 ASSIGN_REGION,
 MOVE_REGION,
 SWAP_REGIONS,
@@ -806,9 +804,9 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
   == numMaxRegionsPerTable[tableIndex]) {
 //recompute maxRegionsPerTable since the previous value was coming 
from the old server
 numMaxRegionsPerTable[tableIndex] = 0;
-for (int serverIndex = 0 ; serverIndex < 
numRegionsPerServerPerTable.length; serverIndex++) {
-  if (numRegionsPerServerPerTable[serverIndex][tableIndex] > 
numMaxRegionsPerTable[tableIndex]) {
-numMaxRegionsPerTable[tableIndex] = 
numRegionsPerServerPerTable[serverIndex][tableIndex];
+for (int[] aNumRegionsPerServerPerTable : numRegionsPerServerPerTable) 
{
+  if (aNumRegionsPerServerPerTable[tableIndex] > 
numMaxRegionsPerTable[tableIndex]) {
+numMaxRegionsPerTable[tableIndex] = 
aNumRegionsPerServerPerTable[tableIndex];
   }
 }
   }
@@ -912,49 +910,7 @@ public abstract class BaseLoad

[43/50] [abbrv] hbase git commit: HBASE-18518 Remove jersey1* dependencies from project and jersey1* jars from lib dir.

2017-08-20 Thread busbey
HBASE-18518 Remove jersey1* dependencies from project and jersey1* jars from 
lib dir.

This patch removes jersey1 dependencies from the hbase REST project, also
removes dead code in 
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java, and 
keeps jersey1 jars out of the lib dir. RESTApiClusterManager.java is modified to use 
jersey2.

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7fee03ed
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7fee03ed
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7fee03ed

Branch: refs/heads/HBASE-18467
Commit: 7fee03ed65af53618d206aed159ab4f2ccb464e1
Parents: 2af2b4c
Author: Samir Ahmic 
Authored: Sun Aug 6 16:16:18 2017 +0200
Committer: Michael Stack 
Committed: Thu Aug 17 22:06:34 2017 -0700

--
 .../src/main/assembly/hadoop-two-compat.xml |  2 ++
 .../hadoop/hbase/RESTApiClusterManager.java | 32 ++--
 .../hadoop/hbase/rest/ResourceConfig.java   | 31 ---
 .../hadoop/hbase/rest/model/VersionModel.java   |  4 +--
 .../hadoop/hbase/rest/TestVersionResource.java  |  6 ++--
 5 files changed, 23 insertions(+), 52 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7fee03ed/hbase-assembly/src/main/assembly/hadoop-two-compat.xml
--
diff --git a/hbase-assembly/src/main/assembly/hadoop-two-compat.xml 
b/hbase-assembly/src/main/assembly/hadoop-two-compat.xml
index 3b8908c..8039c4d 100644
--- a/hbase-assembly/src/main/assembly/hadoop-two-compat.xml
+++ b/hbase-assembly/src/main/assembly/hadoop-two-compat.xml
@@ -59,6 +59,8 @@
   
 
   org.jruby:jruby-complete
+  com.sun.jersey:*
+  com.sun.jersey.contribs:*
 
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/7fee03ed/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
index 03ba460..055b58a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
@@ -18,10 +18,6 @@
 
 package org.apache.hadoop.hbase;
 
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -29,7 +25,12 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
-
+import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.Invocation;
+import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.UriBuilder;
@@ -91,7 +92,7 @@ public class RESTApiClusterManager extends Configured 
implements ClusterManager
   private static final String API_VERSION = "v6";
 
   // Client instances are expensive, so use the same one for all our REST 
queries.
-  private Client client = Client.create();
+  private Client client = ClientBuilder.newClient();
 
   // An instance of HBaseClusterManager is used for methods like the kill, 
resume, and suspend
   // because cluster managers don't tend to implement these operations.
@@ -117,7 +118,7 @@ public class RESTApiClusterManager extends Configured 
implements ClusterManager
 clusterName = conf.get(REST_API_CLUSTER_MANAGER_CLUSTER_NAME, 
DEFAULT_CLUSTER_NAME);
 
 // Add filter to Client instance to enable server authentication.
-client.addFilter(new HTTPBasicAuthFilter(serverUsername, serverPassword));
+client.register(HttpAuthenticationFeature.basic(serverUsername, 
serverPassword));
   }
 
   @Override
@@ -193,10 +194,9 @@ public class RESTApiClusterManager extends Configured 
implements ClusterManager
 .build();
 String body = "{ \"items\": [ \"" + roleName + "\" ] }";
 LOG.info("Executing POST against " + uri + " with body " + body + "...");
-ClientResponse response = client.resource(uri)
-.type(MediaType.APPLICATION_JSON)
-.post(ClientResponse.class, body);
-
+WebTarget webTarget = client.target(uri);
+Invocation.Builder invocationBuilder =  
webTarget.request(Media
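
For reference, a self-contained sketch of the jersey2/JAX-RS client pattern this patch switches 
to; the endpoint, credentials, and payload below are placeholders, not values from the patch:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.client.Entity;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;
    import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;

    public class Jersey2PostSketch {
      public static void main(String[] args) {
        String uri = "http://localhost:7180/api/v6/example";   // placeholder URI
        String body = "{ \"items\": [ \"some-role\" ] }";      // placeholder payload

        Client client = ClientBuilder.newClient();
        // Same basic-auth registration style as the patch uses.
        client.register(HttpAuthenticationFeature.basic("admin", "admin"));

        Response response = client.target(uri)
            .request(MediaType.APPLICATION_JSON)
            .post(Entity.entity(body, MediaType.APPLICATION_JSON));
        System.out.println("HTTP status: " + response.getStatus());
        client.close();
      }
    }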

[42/50] [abbrv] hbase git commit: HBASE-18489 Addendum revert protected to private, give shaded import a separated block

2017-08-20 Thread busbey
HBASE-18489 Addendum revert protected to private, give shaded import a 
separated block


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2af2b4c0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2af2b4c0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2af2b4c0

Branch: refs/heads/HBASE-18467
Commit: 2af2b4c040ca2de9c10ee6dcff92b55c37956faf
Parents: 1ab6882f
Author: zhangduo 
Authored: Fri Aug 18 10:47:30 2017 +0800
Committer: zhangduo 
Committed: Fri Aug 18 10:54:49 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   | 23 ++--
 .../hadoop/hbase/regionserver/StoreScanner.java |  5 +++--
 2 files changed, 15 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2af2b4c0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 696bd76..9a25275 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -115,6 +115,18 @@ import 
org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.DNS;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
+import org.apache.hadoop.hbase.util.Strings;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WALSplitter;
+import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
+import org.apache.zookeeper.KeeperException;
+
 import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache;
 import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
@@ -208,17 +220,6 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescr
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.hbase.wal.WALSplitter;
-import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
-import org.apache.zookeeper.KeeperException;
 
 /**
  * Implements the regionserver RPC services.

http://git-wip-us.apache.org/repos/asf/hbase/blob/2af2b4c0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 5286c39..a220f54 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -48,10 +48,11 @@ import 
org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatc
 import 
org.apache.hadoop.hbase.regionserver.querymatcher.LegacyScanQueryMatcher;
 import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;
 import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+
 /**
  * Scanner scans both the memstore and the Store. Coalesce KeyValue stream 
into List
  * for a single row.
@@ -104,7 +105,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
* KVs skipped via seeking to next row/column. TODO: est

[35/50] [abbrv] hbase git commit: HBASE-18511 Default no regions on master

2017-08-20 Thread busbey
HBASE-18511 Default no regions on master

Changes the configuration hbase.balancer.tablesOnMaster from a list of
table names to a boolean; true if the master carries
tables/regions and false if it does not.

Adds a new configuration hbase.balancer.tablesOnMaster.systemTablesOnly.
If true, hbase.balancer.tablesOnMaster is considered true but only
system tables are put on the master.
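
A minimal sketch of how the two keys read after this change; the default values shown are an 
assumption based on the commit title ("Default no regions on master"), not taken from the patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class TablesOnMasterSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Both keys are plain booleans after this change.
        boolean tablesOnMaster =
            conf.getBoolean("hbase.balancer.tablesOnMaster", false);
        boolean systemTablesOnly =
            conf.getBoolean("hbase.balancer.tablesOnMaster.systemTablesOnly", false);
        System.out.println("tablesOnMaster=" + tablesOnMaster
            + ", systemTablesOnly=" + systemTablesOnly);
      }
    }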

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
  Master was claiming itself active master though it had stopped. Fix
the activeMaster flag. Set it to false on exit.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
 Add new configs and convenience methods for getting current state of
settings.

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 Move configs up into the super Interface; the settings now mean
something different, so remove the no-longer-needed processing.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/47344671
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/47344671
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/47344671

Branch: refs/heads/HBASE-18467
Commit: 473446719b7b81b56216862bf2a94a576ff90f60
Parents: acf9b87
Author: Michael Stack 
Authored: Wed Aug 2 22:54:21 2017 -0700
Committer: Michael Stack 
Committed: Wed Aug 16 08:39:36 2017 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java |  28 ++-
 .../hadoop/hbase/master/LoadBalancer.java   |  31 ++-
 .../hadoop/hbase/master/ServerManager.java  |  30 +--
 .../hbase/master/balancer/BaseLoadBalancer.java | 110 --
 .../balancer/FavoredStochasticBalancer.java |  11 +-
 .../hbase/regionserver/HRegionServer.java   |   5 +-
 .../hadoop/hbase/HBaseTestingUtility.java   |   2 +-
 .../apache/hadoop/hbase/MiniHBaseCluster.java   |   3 +-
 .../apache/hadoop/hbase/client/TestAdmin1.java  |   8 +-
 .../hbase/client/TestAsyncTableAdminApi.java|  16 +-
 .../hbase/client/TestClientClusterStatus.java   |   5 +-
 .../hadoop/hbase/client/TestFromClientSide.java |   5 +-
 .../hadoop/hbase/fs/TestBlockReorder.java   |   4 +-
 .../hadoop/hbase/master/TestMasterMetrics.java  |  19 +-
 .../hbase/master/TestMasterMetricsWrapper.java  |  13 +-
 .../hbase/master/TestMasterNoCluster.java   |   7 +-
 .../master/balancer/TestBaseLoadBalancer.java   |  10 +-
 .../balancer/TestRegionsOnMasterOptions.java| 200 +++
 .../hbase/regionserver/TestClusterId.java   |   4 +-
 .../TestRSKilledWhenInitializing.java   |  15 +-
 .../hbase/regionserver/TestRegionOpen.java  |   5 +-
 .../regionserver/TestRegionServerAbort.java |  14 +-
 .../regionserver/TestRegionServerHostname.java  |  11 +-
 .../regionserver/TestRegionServerMetrics.java   |  57 --
 .../TestRegionServerReadRequestMetrics.java |  12 +-
 .../TestRegionServerReportForDuty.java  |  15 +-
 .../TestSplitTransactionOnCluster.java  |  16 +-
 .../TestFlushWithThroughputController.java  |   8 +-
 .../security/access/TestNamespaceCommands.java  |  13 +-
 29 files changed, 491 insertions(+), 186 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/47344671/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index ce83838..6b4d4e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -530,6 +530,17 @@ public class HMaster extends HRegionServer implements 
MasterServices {
 }
   }
 
+  // Main run loop. Calls through to the regionserver run loop.
+  @Override
+  public void run() {
+try {
+  super.run();
+} finally {
+  // If on way out, then we are no longer active master.
+  this.activeMaster = false;
+}
+  }
+
   // return the actual infoPort, -1 means disable info server.
   private int putUpJettyServer() throws IOException {
 if (!conf.getBoolean("hbase.master.infoserver.redirect", true)) {
@@ -604,9 +615,8 @@ public class HMaster extends HRegionServer implements 
MasterServices {
*/
   @Override
   protected void waitForMasterActive(){
-boolean tablesOnMaster = BaseLoadBalancer.tablesOnMaster(conf);
-while (!(tablesOnMaster && activeMaster)
-&& !isStopped() && !isAborted()) {
+boolean tablesOnMaster = LoadBalancer.isTablesOnMaster(conf);
+while (!(tablesOnMaster && activeMaster) && !isStopped() && !isAborted()) {
   sleeper.sleep();
 }
   }
@@ -644,7 +654,7 @@ public class HMaster extends HR

[41/50] [abbrv] hbase git commit: HBASE-14498 Master stuck in infinite loop when all Zookeeper servers are unreachable

2017-08-20 Thread busbey
HBASE-14498 Master stuck in infinite loop when all Zookeeper servers are 
unreachable

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1ab6882f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1ab6882f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1ab6882f

Branch: refs/heads/HBASE-18467
Commit: 1ab6882f6285e0bfa2864a09208edf4f2ae4125d
Parents: 75a6b36
Author: Pankaj Kumar 
Authored: Thu Aug 17 17:06:50 2017 +0800
Committer: Michael Stack 
Committed: Thu Aug 17 19:02:39 2017 -0700

--
 .../hbase/zookeeper/ZooKeeperWatcher.java   | 85 +++-
 .../hbase/zookeeper/TestZooKeeperWatcher.java   | 47 +++
 .../hbase/regionserver/HRegionServer.java   |  2 +-
 3 files changed, 131 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1ab6882f/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index 6bec352..8266c9a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -26,6 +26,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -39,6 +42,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.security.Superusers;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.WatchedEvent;
@@ -76,7 +80,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, 
Closeable {
   private final RecoverableZooKeeper recoverableZooKeeper;
 
   // abortable in case of zk failure
-  protected Abortable abortable;
+  protected final Abortable abortable;
   // Used if abortable is null
   private boolean aborted = false;
 
@@ -89,6 +93,13 @@ public class ZooKeeperWatcher implements Watcher, Abortable, 
Closeable {
   // negotiation to complete
   public CountDownLatch saslLatch = new CountDownLatch(1);
 
+  // Connection timeout on disconnect event
+  private long connWaitTimeOut;
+  private AtomicBoolean connected = new AtomicBoolean(false);
+  private boolean forceAbortOnZKDisconnect;
+ 
+  // Execute service for zookeeper disconnect event watcher
+  private ExecutorService zkEventWatcherExecService = null;
 
 
   private final Configuration conf;
@@ -122,6 +133,24 @@ public class ZooKeeperWatcher implements Watcher, 
Abortable, Closeable {
   public ZooKeeperWatcher(Configuration conf, String identifier,
   Abortable abortable, boolean canCreateBaseZNode)
   throws IOException, ZooKeeperConnectionException {
+this(conf, identifier, abortable, canCreateBaseZNode, false);
+  }
+
+  /**
+   * Instantiate a ZooKeeper connection and watcher.
+   * @param conf Configuration
+   * @param identifier string that is passed to RecoverableZookeeper to be 
used as identifier for
+   *  this instance. Use null for default.
+   * @param abortable Can be null if there is on error there is no host to 
abort: e.g. client
+   *  context.
+   * @param canCreateBaseZNode whether create base node.
+   * @param forceAbortOnZKDisconnect abort the watcher if true.
+   * @throws IOException when any IO exception
+   * @throws ZooKeeperConnectionException when any zookeeper connection 
exception
+   */
+  public ZooKeeperWatcher(Configuration conf, String identifier, Abortable 
abortable,
+  boolean canCreateBaseZNode, boolean forceAbortOnZKDisconnect) throws 
IOException,
+  ZooKeeperConnectionException {
 this.conf = conf;
 this.quorum = ZKConfig.getZKQuorumServersString(conf);
 this.prefix = identifier;
@@ -130,6 +159,9 @@ public class ZooKeeperWatcher implements Watcher, 
Abortable, Closeable {
 this.identifier = identifier + "0x0";
 this.abortable = abortable;
 this.znodePaths = new ZNodePaths(conf);
+// On Disconnected event a thread will wait for sometime (2/3 of 
zookeeper.session.timeout),
+// it will abort the process if no SyncConnected event reported by the 
time.
+   
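
A minimal, standalone sketch of the disconnect-handling idea described in that comment. This is 
not the ZooKeeperWatcher code itself; the field names merely mirror the ones added by the patch, 
and the abort action is an assumed callback:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.atomic.AtomicBoolean;

    class DisconnectWatcherSketch {
      private final AtomicBoolean connected = new AtomicBoolean(true);
      private final long connWaitTimeOut;   // e.g. 2/3 of zookeeper.session.timeout
      private final Runnable abortAction;   // assumed Abortable-style callback

      DisconnectWatcherSketch(long connWaitTimeOut, Runnable abortAction) {
        this.connWaitTimeOut = connWaitTimeOut;
        this.abortAction = abortAction;
      }

      // Called when a SyncConnected event arrives.
      void onSyncConnected() {
        connected.set(true);
      }

      // Called when a Disconnected event arrives: wait, then abort if still disconnected.
      void onDisconnected(ExecutorService exec) {
        connected.set(false);
        exec.execute(() -> {
          try {
            Thread.sleep(connWaitTimeOut);
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return;
          }
          if (!connected.get()) {
            abortAction.run();   // no SyncConnected seen in time; abort the process
          }
        });
      }
    }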

[01/50] [abbrv] hbase git commit: HBASE-18025 CatalogJanitor should collect outdated RegionStates from the AM [Forced Update!]

2017-08-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18467 10c2e559f -> 1abee706c (forced update)


HBASE-18025 CatalogJanitor should collect outdated RegionStates from the AM


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/71a9a9a9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/71a9a9a9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/71a9a9a9

Branch: refs/heads/HBASE-18467
Commit: 71a9a9a9440c9f2e2e9dd301dd372197e38e70c5
Parents: 043ec9b
Author: Esteban Gutierrez 
Authored: Fri Jul 21 14:13:13 2017 -0500
Committer: Esteban Gutierrez 
Committed: Fri Aug 11 13:36:38 2017 -0500

--
 .../hadoop/hbase/master/CatalogJanitor.java |  13 +-
 .../hadoop/hbase/master/ServerManager.java  |   7 +
 .../hbase/master/assignment/RegionStates.java   |   6 +
 .../TestCatalogJanitorInMemoryStates.java   | 185 +++
 4 files changed, 209 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/71a9a9a9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
index ba92c76..8daa7db 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
@@ -221,6 +221,11 @@ public class CatalogJanitor extends ScheduledChore {
   ProcedureExecutor pe = 
this.services.getMasterProcedureExecutor();
   pe.submitProcedure(new GCMergedRegionsProcedure(pe.getEnvironment(),
   mergedRegion, regionA, regionB));
+  // Remove from in-memory states
+  
this.services.getAssignmentManager().getRegionStates().deleteRegion(regionA);
+  
this.services.getAssignmentManager().getRegionStates().deleteRegion(regionB);
+  this.services.getServerManager().removeRegion(regionA);
+  this.services.getServerManager().removeRegion(regionB);
   return true;
 }
 return false;
@@ -234,6 +239,7 @@ public class CatalogJanitor extends ScheduledChore {
*/
   int scan() throws IOException {
 int result = 0;
+
 try {
   if (!alreadyRunning.compareAndSet(false, true)) {
 LOG.debug("CatalogJanitor already running");
@@ -281,8 +287,8 @@ public class CatalogJanitor extends ScheduledChore {
 }
 
 if (!parentNotCleaned.contains(e.getKey().getEncodedName()) &&
-  cleanParent(e.getKey(), e.getValue())) {
-result++;
+cleanParent(e.getKey(), e.getValue())) {
+  result++;
 } else {
   // We could not clean the parent, so it's daughters should not be
   // cleaned either (HBASE-6160)
@@ -355,6 +361,9 @@ public class CatalogJanitor extends ScheduledChore {
 " -- no longer hold references");
   ProcedureExecutor pe = 
this.services.getMasterProcedureExecutor();
   pe.submitProcedure(new GCRegionProcedure(pe.getEnvironment(), parent));
+  // Remove from in-memory states
+  
this.services.getAssignmentManager().getRegionStates().deleteRegion(parent);
+  this.services.getServerManager().removeRegion(parent);
   return true;
 }
 return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a9a9a9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
index c9c792a..f0e9b88 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
@@ -1028,6 +1028,13 @@ public class ServerManager {
 flushedSequenceIdByRegion.remove(encodedName);
   }
 
+  @VisibleForTesting
+  public boolean isRegionInServerManagerStates(final HRegionInfo hri) {
+final byte[] encodedName = hri.getEncodedNameAsBytes();
+return (storeFlushedSequenceIdsByRegion.containsKey(encodedName)
+|| flushedSequenceIdByRegion.containsKey(encodedName));
+  }
+
   /**
* Called by delete table and similar to notify the ServerManager that a 
region was removed.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/71a9a9a9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
 
b/hbase-server/s

[05/50] [abbrv] hbase git commit: HBASE-18555: Remove redundant familyMap.put() from addxxx() of sub-classes of Mutation and Query

2017-08-20 Thread busbey
HBASE-18555: Remove redundant familyMap.put() from addxxx() of sub-classes of 
Mutation and Query

Signed-off-by: Jerry He 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/173dce73
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/173dce73
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/173dce73

Branch: refs/heads/HBASE-18467
Commit: 173dce73471da005fb6780a7e7b65b43bad481e2
Parents: 95e8839
Author: Xiang Li 
Authored: Fri Aug 11 00:07:11 2017 +0800
Committer: Jerry He 
Committed: Fri Aug 11 22:49:38 2017 -0700

--
 .../main/java/org/apache/hadoop/hbase/client/Append.java  |  2 +-
 .../main/java/org/apache/hadoop/hbase/client/Delete.java  | 10 +-
 .../src/main/java/org/apache/hadoop/hbase/client/Get.java |  2 +-
 .../java/org/apache/hadoop/hbase/client/Increment.java|  2 --
 .../java/org/apache/hadoop/hbase/client/Mutation.java |  1 +
 .../src/main/java/org/apache/hadoop/hbase/client/Put.java |  7 ---
 .../main/java/org/apache/hadoop/hbase/client/Scan.java|  2 +-
 7 files changed, 9 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/173dce73/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 02ec770..2bd0860 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -137,10 +137,10 @@ public class Append extends Mutation {
 List list = this.familyMap.get(family);
 if (list == null) {
   list  = new ArrayList<>(1);
+  this.familyMap.put(family, list);
 }
 // find where the new entry should be placed in the List
 list.add(cell);
-this.familyMap.put(family, list);
 return this;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/173dce73/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
index 395c277..bf5241c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
@@ -183,9 +183,9 @@ public class Delete extends Mutation implements 
Comparable {
 List list = familyMap.get(family);
 if (list == null) {
   list = new ArrayList<>(1);
+  familyMap.put(family, list);
 }
 list.add(kv);
-familyMap.put(family, list);
 return this;
   }
 
@@ -219,12 +219,12 @@ public class Delete extends Mutation implements 
Comparable {
 List list = familyMap.get(family);
 if(list == null) {
   list = new ArrayList<>(1);
+  familyMap.put(family, list);
 } else if(!list.isEmpty()) {
   list.clear();
 }
 KeyValue kv = new KeyValue(row, family, null, timestamp, 
KeyValue.Type.DeleteFamily);
 list.add(kv);
-familyMap.put(family, list);
 return this;
   }
 
@@ -239,10 +239,10 @@ public class Delete extends Mutation implements 
Comparable {
 List list = familyMap.get(family);
 if(list == null) {
   list = new ArrayList<>(1);
+  familyMap.put(family, list);
 }
 list.add(new KeyValue(row, family, null, timestamp,
   KeyValue.Type.DeleteFamilyVersion));
-familyMap.put(family, list);
 return this;
   }
 
@@ -272,10 +272,10 @@ public class Delete extends Mutation implements 
Comparable {
 List list = familyMap.get(family);
 if (list == null) {
   list = new ArrayList<>(1);
+  familyMap.put(family, list);
 }
 list.add(new KeyValue(this.row, family, qualifier, timestamp,
 KeyValue.Type.DeleteColumn));
-familyMap.put(family, list);
 return this;
   }
 
@@ -307,10 +307,10 @@ public class Delete extends Mutation implements 
Comparable {
 List list = familyMap.get(family);
 if(list == null) {
   list = new ArrayList<>(1);
+  familyMap.put(family, list);
 }
 KeyValue kv = new KeyValue(this.row, family, qualifier, timestamp, 
KeyValue.Type.Delete);
 list.add(kv);
-familyMap.put(family, list);
 return this;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/173dce73/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
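
The idiom behind this cleanup, as a small standalone sketch (plain String keys stand in for the 
byte[] families and Cells used by the real code):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class FamilyMapIdiomSketch {
      public static void main(String[] args) {
        Map<String, List<String>> familyMap = new HashMap<>();
        String family = "cf";

        // Pattern after the patch: put the new list into the map only when it is
        // created, instead of unconditionally re-putting it after every add.
        List<String> list = familyMap.get(family);
        if (list == null) {
          list = new ArrayList<>(1);
          familyMap.put(family, list);
        }
        list.add("cell-1");

        // Equivalent one-liner (not what the patch uses, just the same idea):
        familyMap.computeIfAbsent("cf2", k -> new ArrayList<>(1)).add("cell-2");

        System.out.println(familyMap);
      }
    }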

[24/50] [abbrv] hbase git commit: HBASE-18504 Add documentation for WAL compression

2017-08-20 Thread busbey
HBASE-18504 Add documentation for WAL compression

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/63e313b5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/63e313b5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/63e313b5

Branch: refs/heads/HBASE-18467
Commit: 63e313b5c0d7c56d9cf9602e3c204214331189d5
Parents: 70c4f78
Author: Peter Somogyi 
Authored: Wed Aug 2 17:00:52 2017 +0200
Committer: Michael Stack 
Committed: Tue Aug 15 12:54:08 2017 -0700

--
 src/main/asciidoc/_chapters/architecture.adoc | 16 
 1 file changed, 16 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/63e313b5/src/main/asciidoc/_chapters/architecture.adoc
--
diff --git a/src/main/asciidoc/_chapters/architecture.adoc 
b/src/main/asciidoc/_chapters/architecture.adoc
index ebb0677..2ded813 100644
--- a/src/main/asciidoc/_chapters/architecture.adoc
+++ b/src/main/asciidoc/_chapters/architecture.adoc
@@ -1216,6 +1216,22 @@ This will be the default for HBase 0.99 
(link:https://issues.apache.org/jira/bro
 You must also enable HFile version 3 (which is the default HFile format 
starting in HBase 0.99.
 See link:https://issues.apache.org/jira/browse/HBASE-10855[HBASE-10855]). 
Distributed log replay is unsafe for rolling upgrades.
 
+[[wal.compression]]
+ WAL Compression 
+
+The content of the WAL can be compressed using LRU Dictionary compression.
+This can be used to speed up WAL replication to different datanodes.
+The dictionary can store up to 2^15^ elements; eviction starts after this 
number is exceeded.
+
+To enable WAL compression, set the `hbase.regionserver.wal.enablecompression` 
property to `true`.
+The default value for this property is `false`.
+By default, WAL tag compression is turned on when WAL compression is enabled.
+You can turn off WAL tag compression by setting the 
`hbase.regionserver.wal.tags.enablecompression` property to 'false'.
+
+A possible downside to WAL compression is that we lose more data from the last 
block in the WAL if it is ill-terminated
+mid-write. If entries in this last block were added with new dictionary 
entries but we failed to persist the amended
+dictionary because of an abrupt termination, a read of this last block may not 
be able to resolve last-written entries. 
+
 [[wal.disable]]
  Disabling the WAL
 

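These two properties would normally be set in hbase-site.xml on the region servers; the snippet 
below is only a programmatic sketch to show the keys and the defaults described above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class WalCompressionConfigSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Turn on WAL compression (it defaults to false).
        conf.setBoolean("hbase.regionserver.wal.enablecompression", true);
        // Tag compression is on by default once WAL compression is enabled;
        // set this to false to turn it off explicitly.
        conf.setBoolean("hbase.regionserver.wal.tags.enablecompression", false);
        System.out.println(conf.get("hbase.regionserver.wal.enablecompression"));
      }
    }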


[04/50] [abbrv] hbase git commit: HBASE-18557: Change splitable to mergeable in MergeTableRegionsProcedure

2017-08-20 Thread busbey
HBASE-18557: Change splitable to mergeable in MergeTableRegionsProcedure

Signed-off-by: Jerry He 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/95e88396
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/95e88396
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/95e88396

Branch: refs/heads/HBASE-18467
Commit: 95e883967cbb383b48d8fae548fb55b88c7f0529
Parents: aa8f67a
Author: Yi Liang 
Authored: Thu Aug 10 11:15:59 2017 -0700
Committer: Jerry He 
Committed: Fri Aug 11 22:45:22 2017 -0700

--
 .../hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/95e88396/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index 74d9b75..9aaf297 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -513,7 +513,7 @@ public class MergeTableRegionsProcedure
   throws IOException {
 GetRegionInfoResponse response =
   Util.getRegionInfoResponse(env, rs.getServerName(), rs.getRegion());
-return response.hasSplittable() && response.getSplittable();
+return response.hasMergeable() && response.getMergeable();
   }
 
   /**



[03/50] [abbrv] hbase git commit: HBASE-18526 FIFOCompactionPolicy pre-check uses wrong scope (Vladimir Rodionov)

2017-08-20 Thread busbey
HBASE-18526 FIFOCompactionPolicy pre-check uses wrong scope (Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/aa8f67a1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/aa8f67a1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/aa8f67a1

Branch: refs/heads/HBASE-18467
Commit: aa8f67a148cbefbfc4bfdc25b2dc48c7ed947212
Parents: 1070888
Author: tedyu 
Authored: Fri Aug 11 16:41:40 2017 -0700
Committer: tedyu 
Committed: Fri Aug 11 16:41:40 2017 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java   | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/aa8f67a1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 96bf859..421ae8b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -53,7 +53,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.CoordinatedStateException;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
@@ -79,6 +78,7 @@ import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.coprocessor.BypassCoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -164,6 +164,9 @@ import 
org.apache.hadoop.hbase.replication.master.TableCFsUpdater;
 import org.apache.hadoop.hbase.replication.regionserver.Replication;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.UserProvider;
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
@@ -200,9 +203,6 @@ import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.webapp.WebAppContext;
 
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Service;
 
@@ -1937,14 +1937,14 @@ public class HMaster extends HRegionServer implements 
MasterServices {
   }
 
   // 3. blocking file count
-  String sbfc = htd.getConfigurationValue(HStore.BLOCKING_STOREFILES_KEY);
-  if (sbfc != null) {
-blockingFileCount = Integer.parseInt(sbfc);
+  sv = hcd.getConfigurationValue(HStore.BLOCKING_STOREFILES_KEY);
+  if (sv != null) {
+blockingFileCount = Integer.parseInt(sv);
   }
   if (blockingFileCount < 1000) {
 message =
-"blocking file count '" + HStore.BLOCKING_STOREFILES_KEY + "' " + 
blockingFileCount
-+ " is below recommended minimum of 1000";
+"Blocking file count '" + HStore.BLOCKING_STOREFILES_KEY + "' " + 
blockingFileCount
++ " is below recommended minimum of 1000 for column family "+ 
hcd.getNameAsString();
 throw new IOException(message);
   }
 }
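
A small sketch of where the corrected pre-check now looks: the blocking-store-files setting is 
read from the column family scope, so a per-family override is what gets validated. The table and 
family names are hypothetical, and "hbase.hstore.blockingStoreFiles" is assumed to be the value of 
HStore.BLOCKING_STOREFILES_KEY:

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;

    public class FifoPrecheckSketch {
      public static void main(String[] args) {
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("t1"));
        HColumnDescriptor hcd = new HColumnDescriptor("cf1");
        // Per-column-family override; this is the scope the fixed check reads.
        hcd.setConfiguration("hbase.hstore.blockingStoreFiles", "2000");
        htd.addFamily(hcd);
        System.out.println(hcd.getConfigurationValue("hbase.hstore.blockingStoreFiles"));
      }
    }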



[31/50] [abbrv] hbase git commit: HBASE-18251 Remove unnecessary traversing to the first and last keys in the CellSet (Toshihoro Suzuki)

2017-08-20 Thread busbey
HBASE-18251 Remove unnecessary traversing to the first and last keys in
the CellSet (Toshihoro Suzuki)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9da4e690
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9da4e690
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9da4e690

Branch: refs/heads/HBASE-18467
Commit: 9da4e6906e9d7f62b8a8fe5dc996b066dac4066e
Parents: b087818
Author: Ramkrishna 
Authored: Wed Aug 16 11:05:43 2017 +0530
Committer: Ramkrishna 
Committed: Wed Aug 16 11:06:31 2017 +0530

--
 .../hadoop/hbase/regionserver/CellFlatMap.java  | 63 +---
 .../hadoop/hbase/regionserver/CellSet.java  |  7 +--
 2 files changed, 57 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9da4e690/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
index c83a382..aff6018 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
@@ -282,37 +282,85 @@ public abstract class CellFlatMap implements 
NavigableMap {
   }
 
   //  Entry's getters 

-  // all interfaces returning Entries are unsupported because we are dealing 
only with the keys
+
+  private static class CellFlatMapEntry implements Entry<Cell, Cell> {
+private final Cell cell;
+
+public CellFlatMapEntry (Cell cell) {
+  this.cell = cell;
+}
+
+@Override
+public Cell getKey() {
+  return cell;
+}
+
+@Override
+public Cell getValue() {
+  return cell;
+}
+
+@Override
+public Cell setValue(Cell value) {
+  throw new UnsupportedOperationException();
+}
+  }
+
   @Override
   public Entry lowerEntry(Cell k) {
-throw new UnsupportedOperationException();
+Cell cell = lowerKey(k);
+if (cell == null) {
+  return null;
+}
+return new CellFlatMapEntry(cell);
   }
 
   @Override
   public Entry higherEntry(Cell k) {
-throw new UnsupportedOperationException();
+Cell cell = higherKey(k);
+if (cell == null) {
+  return null;
+}
+return new CellFlatMapEntry(cell);
   }
 
   @Override
   public Entry ceilingEntry(Cell k) {
-throw new UnsupportedOperationException();
+Cell cell = ceilingKey(k);
+if (cell == null) {
+  return null;
+}
+return new CellFlatMapEntry(cell);
   }
 
   @Override
   public Entry floorEntry(Cell k) {
-throw new UnsupportedOperationException();
+Cell cell = floorKey(k);
+if (cell == null) {
+  return null;
+}
+return new CellFlatMapEntry(cell);
   }
 
   @Override
   public Entry firstEntry() {
-throw new UnsupportedOperationException();
+Cell cell = firstKey();
+if (cell == null) {
+  return null;
+}
+return new CellFlatMapEntry(cell);
   }
 
   @Override
   public Entry lastEntry() {
-throw new UnsupportedOperationException();
+Cell cell = lastKey();
+if (cell == null) {
+  return null;
+}
+return new CellFlatMapEntry(cell);
   }
 
+  // The following 2 methods (pollFirstEntry, pollLastEntry) are unsupported 
because these are updating methods.
   @Override
   public Entry pollFirstEntry() {
 throw new UnsupportedOperationException();
@@ -323,7 +371,6 @@ public abstract class CellFlatMap implements 
NavigableMap {
 throw new UnsupportedOperationException();
   }
 
-
   //  Updates 
   // All updating methods below are unsupported.
   // Assuming an array of Cells will be allocated externally,

http://git-wip-us.apache.org/repos/asf/hbase/blob/9da4e690/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
index 48262a9..6da57d3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
@@ -126,15 +126,12 @@ public class CellSet implements NavigableSet  {
 throw new UnsupportedOperationException("Not implemented");
   }
 
-  // TODO: why do we have a double traversing through map? Recall we have Cell 
to Cell mapping...
-  // First for first/last key, which actually returns Cell

[48/50] [abbrv] hbase git commit: HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site

2017-08-20 Thread busbey
HBASE-18631 Allow ChaosMonkey properties to be specified in hbase-site


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/13028d71
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/13028d71
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/13028d71

Branch: refs/heads/HBASE-18467
Commit: 13028d71576eebda3f5a831903e2a2e6d5f97ff4
Parents: b932d38
Author: Josh Elser 
Authored: Fri Aug 18 22:25:14 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 20 14:51:32 2017 -0400

--
 .../hadoop/hbase/IntegrationTestBase.java   | 21 +
 .../hadoop/hbase/TestIntegrationTestBase.java   | 48 
 .../hbase/chaos/factories/MonkeyConstants.java  | 11 +
 3 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/13028d71/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index d3433c7..46f0490 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -27,6 +28,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -86,6 +88,10 @@ public abstract class IntegrationTestBase extends 
AbstractHBaseTool {
   noClusterCleanUp = true;
 }
 monkeyProps = new Properties();
+// Add entries for the CM from hbase-site.xml as a convenience.
+// Do this prior to loading from the properties file to make sure those in 
the properties
+// file are given precedence to those in hbase-site.xml (backwards 
compatibility).
+loadMonkeyProperties(monkeyProps, HBaseConfiguration.create());
 if (cmd.hasOption(CHAOS_MONKEY_PROPS)) {
   String chaosMonkeyPropsFile = cmd.getOptionValue(CHAOS_MONKEY_PROPS);
   if (StringUtils.isNotEmpty(chaosMonkeyPropsFile)) {
@@ -100,6 +106,21 @@ public abstract class IntegrationTestBase extends 
AbstractHBaseTool {
 }
   }
 
+  /**
+   * Loads entries from the provided {@code conf} into {@code props} when the 
configuration key
+   * is one that may be configuring ChaosMonkey actions.
+   */
+  void loadMonkeyProperties(Properties props, Configuration conf) {
+for (Entry entry : conf) {
+  for (String prefix : MonkeyConstants.MONKEY_CONFIGURATION_KEY_PREFIXES) {
+if (entry.getKey().startsWith(prefix)) {
+  props.put(entry.getKey(), entry.getValue());
+  break;
+}
+  }
+}
+  }
+
   @Override
   protected void processOptions(CommandLine cmd) {
 processBaseOptions(cmd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/13028d71/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
new file mode 100644
index 000..7330909
--- /dev/null
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import o

[13/50] [abbrv] hbase git commit: HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271

2017-08-20 Thread busbey
HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ddbaf56c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ddbaf56c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ddbaf56c

Branch: refs/heads/HBASE-18467
Commit: ddbaf56ca8c712dc44608d3323280f578c56aed2
Parents: e2b797b
Author: Michael Stack 
Authored: Mon Aug 14 09:09:56 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:09:56 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ddbaf56c/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index b28c03e..4c72eca 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,6 +192,23 @@
 
   
 
+
+  
+
+  com.google.code.maven-replacer-plugin
+
+replacer
+[1.5.3,)
+
+  replace
+
+  
+  
+
+ false
+
+  
+
   
 
   



[16/50] [abbrv] hbase git commit: HBASE-18522 Add RowMutations support to Batch

2017-08-20 Thread busbey
HBASE-18522 Add RowMutations support to Batch


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/096dac2e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/096dac2e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/096dac2e

Branch: refs/heads/HBASE-18467
Commit: 096dac2e83c675f212bad4f91888d8440ba152ca
Parents: bd40073
Author: Jerry He 
Authored: Mon Aug 14 10:39:46 2017 -0700
Committer: Jerry He 
Committed: Mon Aug 14 10:39:46 2017 -0700

--
 .../hbase/client/MultiServerCallable.java   | 64 +++-
 .../org/apache/hadoop/hbase/client/Table.java   |  4 +-
 .../hbase/shaded/protobuf/RequestConverter.java |  6 +-
 .../shaded/protobuf/ResponseConverter.java  | 37 ++-
 .../hbase/client/TestFromClientSide3.java   | 46 ++
 .../hadoop/hbase/client/TestMultiParallel.java  | 34 ++-
 6 files changed, 168 insertions(+), 23 deletions(-)
--
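
With this change a RowMutations can be passed to Table#batch alongside ordinary Puts and Deletes. A minimal sketch, assuming an existing table "t" with column family "f" (all names illustrative):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchRowMutationsSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("t"))) {
      // Atomic Put+Delete on one row, batched together with a plain Put on another row.
      RowMutations rm = new RowMutations(Bytes.toBytes("row1"));
      rm.add(new Put(Bytes.toBytes("row1"))
          .addColumn(Bytes.toBytes("f"), Bytes.toBytes("a"), Bytes.toBytes("1")));
      rm.add(new Delete(Bytes.toBytes("row1")).addColumn(Bytes.toBytes("f"), Bytes.toBytes("b")));

      List<Row> actions = new ArrayList<>();
      actions.add(rm);
      actions.add(new Put(Bytes.toBytes("row2"))
          .addColumn(Bytes.toBytes("f"), Bytes.toBytes("a"), Bytes.toBytes("2")));

      Object[] results = new Object[actions.size()];
      table.batch(actions, results);   // RowMutations now supported in the batch path
    }
  }
}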


http://git-wip-us.apache.org/repos/asf/hbase/blob/096dac2e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index 33c9a0b..7f6052e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -93,30 +94,64 @@ class MultiServerCallable extends 
CancellableRegionServerCallable
 RegionAction.Builder regionActionBuilder = RegionAction.newBuilder();
 ClientProtos.Action.Builder actionBuilder = 
ClientProtos.Action.newBuilder();
 MutationProto.Builder mutationBuilder = MutationProto.newBuilder();
-List cells = null;
-// The multi object is a list of Actions by region.  Iterate by region.
+
+// Pre-size. Presume at least a KV per Action. There are likely more.
+List cells =
+(this.cellBlock ? new ArrayList(countOfActions) : null);
+
 long nonceGroup = multiAction.getNonceGroup();
 if (nonceGroup != HConstants.NO_NONCE) {
   multiRequestBuilder.setNonceGroup(nonceGroup);
 }
+// Index to track RegionAction within the MultiRequest
+int regionActionIndex = -1;
+// Map from a created RegionAction to the original index for a RowMutations within
+// its original list of actions
+Map rowMutationsIndexMap = new HashMap<>();
+// The multi object is a list of Actions by region. Iterate by region.
 for (Map.Entry> e: 
this.multiAction.actions.entrySet()) {
   final byte [] regionName = e.getKey();
   final List actions = e.getValue();
   regionActionBuilder.clear();
   regionActionBuilder.setRegion(RequestConverter.buildRegionSpecifier(
   HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME, 
regionName));
-  if (this.cellBlock) {
-// Pre-size. Presume at least a KV per Action.  There are likely more.
-if (cells == null) cells = new ArrayList<>(countOfActions);
-// Send data in cellblocks. The call to buildNoDataMultiRequest will 
skip RowMutations.
-// They have already been handled above. Guess at count of cells
-regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, actions, cells,
-  regionActionBuilder, actionBuilder, mutationBuilder);
-  } else {
-regionActionBuilder = RequestConverter.buildRegionAction(regionName, 
actions,
-  regionActionBuilder, actionBuilder, mutationBuilder);
+
+  int rowMutations = 0;
+  for (Action action : actions) {
+Row row = action.getAction();
+// Row Mutations are a set of Puts and/or Deletes all to be applied atomically
+// on the one row. We create a separate RegionAction for each RowMutations.
+// We maintain a map to keep track of this RegionAction and the original Action index.
+if (row instanceof RowMutations) {
+  RowMutations rms = (RowMutations)row;
+  if (this.cellBlock) {
+// Build a multi request absent its Cell payload. Send data in 
cellblocks.
+regionActionBuilder = 
RequestConverter.buildNoDataRegionAction(regionName, rms, cells,
+  regionActionBuilder, actionBuilder, mutationBuilder);
+  } else {
+regionActionBuilder = 
RequestConverter.buildRegionAction(regionName, rms);
+  }
+  regionActionBuilder.setAtomic(true);
+  multiRequestBuilder.ad

[32/50] [abbrv] hbase git commit: HBASE-17994 Add async client test to Performance Evaluation tool

2017-08-20 Thread busbey
HBASE-17994 Add async client test to Performance Evaluation tool


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2a9cdd5e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2a9cdd5e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2a9cdd5e

Branch: refs/heads/HBASE-18467
Commit: 2a9cdd5e75ab6e51e9ca0539caba29cc4350b98e
Parents: 9da4e69
Author: Guanghao Zhang 
Authored: Wed Jul 12 15:37:55 2017 +0800
Committer: Guanghao Zhang 
Committed: Wed Aug 16 14:05:07 2017 +0800

--
 .../hadoop/hbase/PerformanceEvaluation.java | 390 +--
 1 file changed, 351 insertions(+), 39 deletions(-)
--
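
The new async* commands drive the AsyncConnection-based client. A minimal sketch of the pattern, assuming the AsyncConnection#getTable(TableName, ExecutorService) accessor of this era and an existing table "t" (names illustrative):

import java.util.concurrent.ForkJoinPool;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public class AsyncReadSketch {
  public static void main(String[] args) throws Exception {
    // createAsyncConnection returns a CompletableFuture; get() blocks until it is ready.
    try (AsyncConnection conn =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
      AsyncTable table = conn.getTable(TableName.valueOf("t"), ForkJoinPool.commonPool());
      table.get(new Get(Bytes.toBytes("row1")))
          .thenAccept(result -> System.out.println("got: " + result))
          .join();   // wait for the non-blocking get to complete in this sketch
    }
  }
}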


http://git-wip-us.apache.org/repos/asf/hbase/blob/2a9cdd5e/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 162e761..eebb0f3 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -53,6 +53,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
+import org.apache.hadoop.hbase.client.AsyncConnection;
+import org.apache.hadoop.hbase.client.AsyncTable;
 import org.apache.hadoop.hbase.client.BufferedMutator;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -62,6 +64,7 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RawAsyncTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
@@ -99,9 +102,9 @@ import org.apache.htrace.Sampler;
 import org.apache.htrace.Trace;
 import org.apache.htrace.TraceScope;
 import org.apache.htrace.impl.ProbabilitySampler;
-
 import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
 import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.UniformReservoir;
 
@@ -153,6 +156,16 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");
 
   static {
+addCommandDescriptor(AsyncRandomReadTest.class, "asyncRandomRead",
+"Run async random read test");
+addCommandDescriptor(AsyncRandomWriteTest.class, "asyncRandomWrite",
+"Run async random write test");
+addCommandDescriptor(AsyncSequentialReadTest.class, "asyncSequentialRead",
+"Run async sequential read test");
+addCommandDescriptor(AsyncSequentialWriteTest.class, 
"asyncSequentialWrite",
+"Run async sequential write test");
+addCommandDescriptor(AsyncScanTest.class, "asyncScan",
+"Run async scan test (read every row)");
 addCommandDescriptor(RandomReadTest.class, RANDOM_READ,
   "Run random read test");
 addCommandDescriptor(RandomSeekScanTest.class, RANDOM_SEEK_SCAN,
@@ -226,7 +239,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 super(conf);
   }
 
-  protected static void addCommandDescriptor(Class cmdClass,
+  protected static void addCommandDescriptor(Class 
cmdClass,
   String name, String description) {
 CmdDescriptor cmdDescriptor = new CmdDescriptor(cmdClass, name, 
description);
 COMMANDS.put(name, cmdDescriptor);
@@ -295,9 +308,15 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   TestOptions opts = mapper.readValue(value.toString(), TestOptions.class);
   Configuration conf = 
HBaseConfiguration.create(context.getConfiguration());
   final Connection con = ConnectionFactory.createConnection(conf);
+  AsyncConnection asyncCon = null;
+  try {
+asyncCon = ConnectionFactory.createAsyncConnection(conf).get();
+  } catch (ExecutionException e) {
+throw new IOException(e);
+  }
 
   // Evaluation task
-  RunResult result = PerformanceEvaluation.runOneClient(this.cmd, conf, 
con, opts, status);
+  RunResult result = PerformanceEvaluation.runOneClient(this.cmd, conf, 
con, asyncCon, opts, status);
   // Collect how much time the thing took. Report as map output and
   // to the ELAPSED_TIME counter.
   context.g

[46/50] [abbrv] hbase git commit: HBASE-18471 The DeleteFamily cell is skipped when StoreScanner seeks to next column

2017-08-20 Thread busbey
HBASE-18471 The DeleteFamily cell is skipped when StoreScanner seeks to next 
column


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/56f9e1a6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/56f9e1a6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/56f9e1a6

Branch: refs/heads/HBASE-18467
Commit: 56f9e1a60af224f8f3b0453932155ca46d44cf71
Parents: e9bafeb
Author: Chia-Ping Tsai 
Authored: Sat Aug 19 02:15:12 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Aug 19 02:15:12 2017 +0800

--
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  53 ++
 .../querymatcher/ScanQueryMatcher.java  |  13 +++
 .../hbase/client/TestFromClientSide3.java   | 101 +++
 3 files changed, 167 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/56f9e1a6/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 1146de4..03d8b70 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -3135,4 +3135,57 @@ public final class CellUtil {
   return Type.DeleteFamily.getCode();
 }
   }
+
+  /**
+   * @return A new cell located immediately after the input cell. If both the type and the
+   * timestamp are already at their minimum values, the input cell is returned directly.
+   */
+  @InterfaceAudience.Private
+  public static Cell createNextOnRowCol(Cell cell) {
+long ts = cell.getTimestamp();
+byte type = cell.getTypeByte();
+if (type != Type.Minimum.getCode()) {
+  type = KeyValue.Type.values()[KeyValue.Type.codeToType(type).ordinal() - 
1].getCode();
+} else if (ts != HConstants.OLDEST_TIMESTAMP) {
+  ts = ts - 1;
+  type = Type.Maximum.getCode();
+} else {
+  return cell;
+}
+return createNextOnRowCol(cell, ts, type);
+  }
+
+  private static Cell createNextOnRowCol(Cell cell, long ts, byte type) {
+if (cell instanceof ByteBufferCell) {
+  return new LastOnRowColByteBufferCell(((ByteBufferCell) 
cell).getRowByteBuffer(),
+  ((ByteBufferCell) cell).getRowPosition(), cell.getRowLength(),
+  ((ByteBufferCell) cell).getFamilyByteBuffer(),
+  ((ByteBufferCell) cell).getFamilyPosition(), 
cell.getFamilyLength(),
+  ((ByteBufferCell) cell).getQualifierByteBuffer(),
+  ((ByteBufferCell) cell).getQualifierPosition(), 
cell.getQualifierLength()) {
+@Override
+public long getTimestamp() {
+  return ts;
+}
+
+@Override
+public byte getTypeByte() {
+  return type;
+}
+  };
+}
+return new LastOnRowColCell(cell.getRowArray(), cell.getRowOffset(), 
cell.getRowLength(),
+cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength(),
+cell.getQualifierArray(), cell.getQualifierOffset(), 
cell.getQualifierLength()) {
+  @Override
+  public long getTimestamp() {
+return ts;
+  }
+
+  @Override
+  public byte getTypeByte() {
+return type;
+  }
+};
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/56f9e1a6/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
index 524d3f7..f00a400 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
@@ -290,6 +290,19 @@ public abstract class ScanQueryMatcher implements 
ShipperListener {
   public abstract boolean moreRowsMayExistAfter(Cell cell);
 
   public Cell getKeyForNextColumn(Cell cell) {
+// We aren't sure whether any DeleteFamily cells exist, so we can't skip to the next column.
+// TODO: The current way prevents us from seeking to the next column quickly. Is there
+// any better solution?
+// see HBASE-18471 for more details
+// see TestFromClientSide3#testScanAfterDeletingSpecifiedRow
+// see TestFromClientSide3#testScanAfterDeletingSpecifiedRowV2
+if (cell.getQualifierLength() == 0) {
+  Cell nextKey = CellUtil.createNextOnRowCol(cell);
+  if (nextKey != cell) {
+return nextKey;
+  }
+  // The cell is at the end of row/family/qualifier, so it is im

[20/50] [abbrv] hbase git commit: HBASE-18582 Correct the docs for Mutation#setCellVisibility

2017-08-20 Thread busbey
HBASE-18582 Correct the docs for Mutation#setCellVisibility

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d4317c80
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d4317c80
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d4317c80

Branch: refs/heads/HBASE-18467
Commit: d4317c80e62e4eb0c2e997adf4438b927dfbcd96
Parents: 0e32869
Author: brandboat 
Authored: Mon Aug 14 22:10:23 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Aug 15 21:37:55 2017 +0800

--
 .../src/main/java/org/apache/hadoop/hbase/client/Mutation.java  | 1 -
 1 file changed, 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d4317c80/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index 25b088d..24b4cb8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -331,7 +331,6 @@ public abstract class Mutation extends 
OperationWithAttributes implements Row, C
 
   /**
* Sets the visibility expression associated with cells in this Mutation.
-   * It is illegal to set CellVisibility on Delete 
mutation.
* @param expression
*/
   public Mutation setCellVisibility(CellVisibility expression) {
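
The removed sentence described an old restriction; a visibility expression can be attached to a Delete like any other Mutation. A minimal sketch, assuming the VisibilityController coprocessor is enabled and the labels exist (table, family, and label names illustrative):

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteVisibilitySketch {
  public static void main(String[] args) {
    // Attach a visibility expression to a Delete; this used to be documented as illegal.
    Delete d = new Delete(Bytes.toBytes("row1"))
        .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"));
    d.setCellVisibility(new CellVisibility("secret|topsecret"));
    System.out.println(d);
  }
}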



[34/50] [abbrv] hbase git commit: HBASE-18493 [AMv2] Skipped re-assignment of regions on crashed server through AssignmentManager.checkIfShouldMoveSystemRegionAsync() as those regions are handled by S

2017-08-20 Thread busbey
HBASE-18493 [AMv2] Skipped re-assignment of regions on crashed server through 
AssignmentManager.checkIfShouldMoveSystemRegionAsync() as those regions are 
handled by ServerCrashProcedure

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/acf9b87d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/acf9b87d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/acf9b87d

Branch: refs/heads/HBASE-18467
Commit: acf9b87dca2cd190f4b5318efd5dc48e19b317f4
Parents: 4c74a73
Author: Umesh Agashe 
Authored: Tue Aug 15 14:00:04 2017 -0700
Committer: Michael Stack 
Committed: Wed Aug 16 08:12:05 2017 -0700

--
 .../hadoop/hbase/master/assignment/AssignmentManager.java   | 9 +
 1 file changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/acf9b87d/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
index 54cb1ca..0b23f47 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
@@ -480,6 +480,15 @@ public class AssignmentManager implements ServerListener {
 synchronized (checkIfShouldMoveSystemRegionLock) {
   List plans = new ArrayList<>();
   for (ServerName server : getExcludedServersForSystemTable()) {
+if (master.getServerManager().isServerDead(server)) {
+  // TODO: See HBASE-18494 and HBASE-18495. Though getExcludedServersForSystemTable()
+  // considers only online servers, the server could be queued for dead server
+  // processing. As region assignments for a crashed server are handled by
+  // ServerCrashProcedure, do NOT handle them here. The goal is to handle this through
+  // the regular flow of the LoadBalancer as a favored node and not to have this special
+  // handling.
+  continue;
+}
 List regionsShouldMove = 
getCarryingSystemTables(server);
 if (!regionsShouldMove.isEmpty()) {
   for (HRegionInfo regionInfo : regionsShouldMove) {



[21/50] [abbrv] hbase git commit: HBASE-2631 Decide between InMB and MB as suffix for field names in ClusterStatus objects

2017-08-20 Thread busbey
HBASE-2631 Decide between InMB and MB as suffix for field names in 
ClusterStatus objects

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d37266f6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d37266f6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d37266f6

Branch: refs/heads/HBASE-18467
Commit: d37266f63cf90068415a8cef05b1c63dccc0a9d9
Parents: d4317c8
Author: Deon Huang 
Authored: Sun Aug 13 21:52:03 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Aug 15 21:42:31 2017 +0800

--
 .../org/apache/hadoop/hbase/ServerLoad.java | 27 
 .../tmpl/master/RegionServerListTmpl.jamon  |  4 +--
 .../org/apache/hadoop/hbase/TestServerLoad.java |  6 ++---
 3 files changed, 32 insertions(+), 5 deletions(-)
--
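
Migrating off the deprecated accessors is a mechanical rename; a short sketch, assuming a ServerLoad obtained elsewhere (for example from ClusterStatus#getLoad):

import org.apache.hadoop.hbase.ServerLoad;

public class ServerLoadSketch {
  // Mechanical rename from the deprecated *InMB accessors to the new *MB ones.
  static void printSizes(ServerLoad sl) {
    System.out.println("storefiles MB: " + sl.getStorefileSizeMB());      // was getStorefileSizeInMB()
    System.out.println("memstore MB:   " + sl.getMemstoreSizeMB());       // was getMemstoreSizeInMB()
    System.out.println("index MB:      " + sl.getStorefileIndexSizeMB()); // was getStorefileIndexSizeInMB()
  }
}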


http://git-wip-us.apache.org/repos/asf/hbase/blob/d37266f6/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java
index 8547dfb..8d4c7d3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java
@@ -131,18 +131,45 @@ public class ServerLoad {
 return storeUncompressedSizeMB;
   }
 
+  /**
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * Use {@link #getStorefileSizeMB()} instead.
+   */
+  @Deprecated
   public int getStorefileSizeInMB() {
 return storefileSizeMB;
   }
 
+  public int getStorefileSizeMB() {
+return storefileSizeMB;
+  }
+
+  /**
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * Use {@link #getMemstoreSizeMB()} instead.
+   */
+  @Deprecated
   public int getMemstoreSizeInMB() {
 return memstoreSizeMB;
   }
 
+  public int getMemstoreSizeMB() {
+return memstoreSizeMB;
+  }
+
+  /**
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * Use {@link #getStorefileIndexSizeMB()} instead.
+   */
+  @Deprecated
   public int getStorefileIndexSizeInMB() {
 return storefileIndexSizeMB;
   }
 
+  public int getStorefileIndexSizeMB() {
+return storefileIndexSizeMB;
+  }
+
   public long getReadRequestsCount() {
 return readRequestsCount;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/d37266f6/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
index a62d5eb..5dd10e8 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
@@ -153,7 +153,7 @@ for (ServerName serverName: serverNames) {
   * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
 <% TraditionalBinaryPrefix.long2String(sl.getMaxHeapMB()
   * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
-<% TraditionalBinaryPrefix.long2String(sl.getMemstoreSizeInMB()
+<% TraditionalBinaryPrefix.long2String(sl.getMemstoreSizeMB()
   * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
 
 
@@ -232,7 +232,7 @@ if (sl != null) {
 <% sl.getStorefiles() %>
 <% TraditionalBinaryPrefix.long2String(
   sl.getStoreUncompressedSizeMB() * TraditionalBinaryPrefix.MEGA.value, "B", 
1) %>
-<% TraditionalBinaryPrefix.long2String(sl.getStorefileSizeInMB()
+<% TraditionalBinaryPrefix.long2String(sl.getStorefileSizeMB()
   * TraditionalBinaryPrefix.MEGA.value, "B", 1) %>
 <% TraditionalBinaryPrefix.long2String(sl.getTotalStaticIndexSizeKB()
   * TraditionalBinaryPrefix.KILO.value, "B", 1) %>

http://git-wip-us.apache.org/repos/asf/hbase/blob/d37266f6/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
index cbd76ce..2d248b0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
@@ -41,8 +41,8 @@ public class TestServerLoad {
 assertEquals(114, sl.getStorefiles());
 assertEquals(129, sl.getStoreUncompressedSizeMB());
 assertEquals(504, sl.getRootIndexSizeKB());
-assertEquals(820, sl.getStorefileSizeInMB());
-assertEqual

[49/50] [abbrv] hbase git commit: HBASE-18467 WIP build up a jira comment.

2017-08-20 Thread busbey
HBASE-18467 WIP build up a jira comment.

HBASE-18467 WIP fail yetus checks instead of running.

WIP fixing syntax.

WIP switch to non-jenkins substituded string.

WIP adding attempt at using jira selector after all.

HBASE-18467 make sure placeholder failure for yetus writes console report.

HBASE-18467 more syntax.

HBASE-18467 WIP let's try using the changeset directly.

HBASE-18467 WIP assemble comment contents.

HBASE-18467 WIP, doesn't look like newlines are cool with groovy.

HBASE-18467 WIP ah, it was that you can't have newlines in var names.

HBASE-18467 WIP more debugging. ugh.

HBASE-18467 WIP


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c2537ac6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c2537ac6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c2537ac6

Branch: refs/heads/HBASE-18467
Commit: c2537ac6d865f511f19989a580702215be25f8f7
Parents: 13028d7
Author: Sean Busbey 
Authored: Wed Aug 9 00:48:46 2017 -0500
Committer: Sean Busbey 
Committed: Sun Aug 20 15:31:33 2017 -0500

--
 dev-support/Jenkinsfile| 104 ++--
 dev-support/hbase_nightly_yetus.sh |   7 +++
 2 files changed, 105 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c2537ac6/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 1f01a47..7207ab3 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -128,7 +128,17 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   steps {
 unstash 'yetus'
 // TODO should this be a download from master, similar to how the 
personality is?
-sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+sh '''#!/usr/bin/env bash
+  declare commentfile
+  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+commentfile="${OUTPUTDIR}/success"
+echo '(/) *{color:green}+1 general checks{color}*' >> 
"${commentfile}"
+  else
+commentfile="${OUTPUTDIR}/failure"
+echo '(x) *{color:red}-1 general checks{color}*' >> 
"${commentfile}"
+  fi
+  echo '-- For more information [see general 
report|${BUILD_URL}/General_Nightly_Build_Report/]' >> "${commentfile}"
+'''
   }
   post {
 always {
@@ -159,13 +169,21 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   }
   steps {
 unstash 'yetus'
-sh """#!/usr/bin/env bash
+sh '''#!/usr/bin/env bash
   # for branch-1.1 we don't do jdk8 findbugs, so do it here
-  if [ "${env.BRANCH_NAME}" == "branch-1.1" ]; then
+  if [ "${BRANCH_NAME}" == "branch-1.1" ]; then
 TESTS+=",findbugs"
   fi
-  "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
-"""
+  declare commentfile
+  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+commentfile='${OUTPUTDIR}/success'
+echo '(/) *{color:green}+1 jdk7 checks{color}*' >> "${commentfile}"
+  else
+commentfile='${OUTPUTDIR}/failure'
+echo '(x) *{color:red}-1 jdk7 checks{color}*' >> "${commentfile}"
+  fi
+  echo '-- For more information [see jdk7 
report|${BUILD_URL}/JDK7_Nightly_Build_Report/]' >> "${commentfile}"
+'''
   }
   post {
 always {
@@ -215,7 +233,17 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   }
   steps {
 unstash 'yetus'
-sh "${env.BASEDIR}/dev-support/hbase_nightly_yetus.sh"
+sh '''#!/usr/bin/env bash
+  declare commentfile
+  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+commentfile='${OUTPUTDIR}/success'
+echo '(/) *{color:green}+1 jdk8 checks{color}*' >> "${commentfile}"
+  else
+commentfile='${OUTPUTDIR}/failure'
+echo '(x) *{color:red}-1 jdk8 checks{color}*' >> "${commentfile}"
+  fi
+  echo '-- For more information [see jdk8 
report|${BUILD_URL}/JDK8_Nightly_Build_Report/]' >> "${commentfile}"
+'''
   }
   post {
 always {
@@ -304,6 +332,70 @@ END
   fi
 '''
   }
+  // This approach only works because the source release artifact is the 
last stage that does work.
+  post {
+success {
+  writeFile file: "${env.WORKSPACE}/src_tarball_success", text: '(/) 
*{color:green}+1 source release artifact{color}*'
+}
+failure {
+  writeFile file: "${env.WORKSPACE}/src_tarball_failure", text: '(x) 
*{color:red}-1 source release artifact{color}*'
+}
+  }
+}
+stage ('Fail if previous stages f

[10/50] [abbrv] hbase git commit: HBASE-14135 Merge backup images (Vladimir Rodionov)

2017-08-20 Thread busbey
HBASE-14135 Merge backup images (Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/05e6e569
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/05e6e569
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/05e6e569

Branch: refs/heads/HBASE-18467
Commit: 05e6e5695089640006d06c2f74126b50a73363b7
Parents: c6ac04a
Author: Josh Elser 
Authored: Sun Aug 13 20:55:58 2017 -0400
Committer: Josh Elser 
Committed: Sun Aug 13 20:55:58 2017 -0400

--
 .../apache/hadoop/hbase/backup/BackupAdmin.java |  20 +-
 .../hadoop/hbase/backup/BackupDriver.java   |   2 +
 .../apache/hadoop/hbase/backup/BackupInfo.java  |   5 +
 .../hadoop/hbase/backup/BackupMergeJob.java |  40 +++
 .../hbase/backup/BackupRestoreFactory.java  |  20 +-
 .../hadoop/hbase/backup/HBackupFileSystem.java  |  57 ++--
 .../hbase/backup/impl/BackupAdminImpl.java  | 213 +---
 .../hbase/backup/impl/BackupCommands.java   | 163 ++---
 .../hadoop/hbase/backup/impl/BackupManager.java |  21 +-
 .../hbase/backup/impl/BackupManifest.java   |  24 +-
 .../hbase/backup/impl/BackupSystemTable.java| 314 ++---
 .../hbase/backup/impl/RestoreTablesClient.java  |  32 +-
 .../backup/mapreduce/HFileSplitterJob.java  | 181 --
 .../mapreduce/MapReduceBackupMergeJob.java  | 321 ++
 .../mapreduce/MapReduceHFileSplitterJob.java| 181 ++
 .../backup/mapreduce/MapReduceRestoreJob.java   |  84 ++---
 .../hadoop/hbase/backup/util/BackupUtils.java   |  93 +++--
 .../TestIncrementalBackupMergeWithFailures.java | 336 +++
 .../backup/TestRepairAfterFailedDelete.java |   2 +-
 19 files changed, 1574 insertions(+), 535 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/05e6e569/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
index 6f642a4..9dc6382 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
@@ -38,8 +38,8 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 public interface BackupAdmin extends Closeable {
 
   /**
-   * Backup given list of tables fully. This is a synchronous operation.
-   * It returns backup id on success or throw exception on failure.
+   * Backup a given list of tables fully. This is a synchronous operation. It returns the
+   * backup id on success or throws an exception on failure.
* @param userRequest BackupRequest instance
* @return the backup Id
*/
@@ -61,16 +61,24 @@ public interface BackupAdmin extends Closeable {
*/
   BackupInfo getBackupInfo(String backupId) throws IOException;
 
-
   /**
* Delete backup image command
-   * @param backupIds backup id list
+   * @param backupIds array of backup ids
* @return total number of deleted sessions
* @throws IOException exception
*/
   int deleteBackups(String[] backupIds) throws IOException;
 
   /**
+   * Merge backup images command
+   * @param backupIds array of backup ids of images to be merged
+   *The resulting backup image will have the same backup id as the most
+   *recent image from a list of images to be merged
+   * @throws IOException exception
+   */
+  void mergeBackups(String[] backupIds) throws IOException;
+
+  /**
* Show backup history command
* @param n last n backup sessions
* @return list of backup info objects
@@ -113,7 +121,7 @@ public interface BackupAdmin extends Closeable {
   /**
* Add tables to backup set command
* @param name name of backup set.
-   * @param tables list of tables to be added to this set.
+   * @param tables array of tables to be added to this set.
* @throws IOException exception
*/
   void addToBackupSet(String name, TableName[] tables) throws IOException;
@@ -121,7 +129,7 @@ public interface BackupAdmin extends Closeable {
   /**
* Remove tables from backup set
* @param name name of backup set.
-   * @param tables list of tables to be removed from this set.
+   * @param tables array of tables to be removed from this set.
* @throws IOException exception
*/
   void removeFromBackupSet(String name, TableName[] tables) throws IOException;
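
A minimal sketch of driving a merge through the new method, assuming a BackupAdmin implementation obtained elsewhere and two existing incremental backup ids (the ids below are placeholders):

import java.io.IOException;

import org.apache.hadoop.hbase.backup.BackupAdmin;

public class MergeBackupsSketch {
  // Merge two incremental backup images; the resulting image keeps the id of the
  // most recent one, per the javadoc above.
  static void merge(BackupAdmin admin) throws IOException {
    String[] backupIds = { "backup_0001", "backup_0002" };  // placeholder ids
    admin.mergeBackups(backupIds);
  }
}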

http://git-wip-us.apache.org/repos/asf/hbase/blob/05e6e569/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java 
b/hba

[30/50] [abbrv] hbase git commit: HBASE-18437 Revoke access permissions of a user from a table does not work as expected

2017-08-20 Thread busbey
HBASE-18437 Revoke access permissions of a user from a table does not work as 
expected

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b0878184
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b0878184
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b0878184

Branch: refs/heads/HBASE-18467
Commit: b0878184a31804a4bf061df7581964157b4849d5
Parents: 59ffb611
Author: Ashish Singhi 
Authored: Fri Aug 11 12:48:32 2017 +0530
Committer: Andrew Purtell 
Committed: Tue Aug 15 22:29:16 2017 -0700

--
 .../hbase/security/access/Permission.java   |  6 ++
 .../security/access/AccessControlLists.java | 37 +++-
 .../security/access/TestAccessController.java   | 96 ++--
 3 files changed, 106 insertions(+), 33 deletions(-)
--
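
With the fix, revoking one action no longer wipes out a user's remaining grants. A minimal sketch, assuming the AccessControlClient helper and illustrative table, user, and family names:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.util.Bytes;

public class RevokeSketch {
  // Revoke only WRITE from user "bob" on table "t"; the user's other grants
  // (e.g. READ) are left in place instead of being removed along with it.
  static void revokeWrite(Connection conn) throws Throwable {
    AccessControlClient.revoke(conn, TableName.valueOf("t"), "bob",
        Bytes.toBytes("f"), null, Permission.Action.WRITE);
  }
}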


http://git-wip-us.apache.org/repos/asf/hbase/blob/b0878184/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index 8476f61..18096e1 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -110,6 +110,12 @@ public class Permission extends VersionedWritable {
 return false;
   }
 
+  public void setActions(Action[] assigned) {
+if (assigned != null && assigned.length > 0) {
+  actions = Arrays.copyOf(assigned, assigned.length);
+}
+  }
+
   @Override
   public boolean equals(Object obj) {
 if (!(obj instanceof Permission)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b0878184/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index 12bdc22..38e292c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -241,13 +241,40 @@ public class AccessControlLists {
*/
   static void removeUserPermission(Configuration conf, UserPermission 
userPerm, Table t)
   throws IOException {
-Delete d = new Delete(userPermissionRowKey(userPerm));
-byte[] key = userPermissionKey(userPerm);
-
+if (null == userPerm.getActions()) {
+  removePermissionRecord(conf, userPerm, t);
+} else {
+  // Get all the global user permissions from the acl table
+  List permsList = getUserPermissions(conf, 
userPermissionRowKey(userPerm));
+  List remainingActions = new ArrayList<>();
+  List dropActions = 
Arrays.asList(userPerm.getActions());
+  for (UserPermission perm : permsList) {
+// Find the user and remove only the requested permissions
+if 
(Bytes.toString(perm.getUser()).equals(Bytes.toString(userPerm.getUser( {
+  for (Permission.Action oldAction : perm.getActions()) {
+if (!dropActions.contains(oldAction)) {
+  remainingActions.add(oldAction);
+}
+  }
+  if (!remainingActions.isEmpty()) {
+perm.setActions(remainingActions.toArray(new 
Permission.Action[remainingActions.size()]));
+addUserPermission(conf, perm, t);
+  } else {
+removePermissionRecord(conf, userPerm, t);
+  }
+  break;
+}
+  }
+}
 if (LOG.isDebugEnabled()) {
-  LOG.debug("Removing permission "+ userPerm.toString());
+  LOG.debug("Removed permission "+ userPerm.toString());
 }
-d.addColumns(ACL_LIST_FAMILY, key);
+  }
+
+  private static void removePermissionRecord(Configuration conf, 
UserPermission userPerm, Table t)
+  throws IOException {
+Delete d = new Delete(userPermissionRowKey(userPerm));
+d.addColumns(ACL_LIST_FAMILY, userPermissionKey(userPerm));
 try {
   t.delete(d);
 } finally {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b0878184/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index c1fbb28..6583366 100644
--- 
a/h

[07/50] [abbrv] hbase git commit: HBASE-18271 Shade netty Purge mention of netty-all.

2017-08-20 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase/blob/c6ac04ab/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
index 3b32383..6a39e12 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import io.netty.channel.epoll.EpollEventLoopGroup;
-import io.netty.channel.epoll.EpollSocketChannel;
-import io.netty.channel.nio.NioEventLoopGroup;
-import io.netty.channel.socket.nio.NioSocketChannel;
+import 
org.apache.hadoop.hbase.shaded.io.netty.channel.epoll.EpollEventLoopGroup;
+import 
org.apache.hadoop.hbase.shaded.io.netty.channel.epoll.EpollSocketChannel;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.nio.NioEventLoopGroup;
+import 
org.apache.hadoop.hbase.shaded.io.netty.channel.socket.nio.NioSocketChannel;
 
 import java.util.ArrayList;
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c6ac04ab/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncFSWAL.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncFSWAL.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncFSWAL.java
index 9b28975..2ae916f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncFSWAL.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncFSWAL.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hbase.regionserver.wal;
 
-import io.netty.channel.Channel;
-import io.netty.channel.EventLoopGroup;
-import io.netty.channel.nio.NioEventLoopGroup;
-import io.netty.channel.socket.nio.NioSocketChannel;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.nio.NioEventLoopGroup;
+import 
org.apache.hadoop.hbase.shaded.io.netty.channel.socket.nio.NioSocketChannel;
 
 import java.io.IOException;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c6ac04ab/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
index bb67820..708d64c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
@@ -19,10 +19,10 @@ package org.apache.hadoop.hbase.regionserver.wal;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
 
-import io.netty.channel.Channel;
-import io.netty.channel.EventLoopGroup;
-import io.netty.channel.nio.NioEventLoopGroup;
-import io.netty.channel.socket.nio.NioSocketChannel;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.nio.NioEventLoopGroup;
+import 
org.apache.hadoop.hbase.shaded.io.netty.channel.socket.nio.NioSocketChannel;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c6ac04ab/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncWALReplay.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncWALReplay.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncWALReplay.java
index 17f58f8..881cf7c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncWALReplay.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncWALReplay.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hbase.regionserver.wal;
 
-import io.netty.channel.Channel;
-import io.netty.channel.EventLoopGroup;
-import io.netty.channel.nio.NioEventLoopGroup;
-import io.netty.channel.socket.nio.NioSocketChannel;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
+import org.apache.hadoop.hbase.shaded.io.netty.channel.nio.NioEventLoopGroup;
+import 
org.apache.hadoop.hbase.shaded.io.netty.cha

[06/50] [abbrv] hbase git commit: HBASE-15511 ClusterStatus should be able to return responses by scope

2017-08-20 Thread busbey
HBASE-15511 ClusterStatus should be able to return responses by scope

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/923195c3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/923195c3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/923195c3

Branch: refs/heads/HBASE-18467
Commit: 923195c39e872ebca92b5cc5f148e4c1d39718ea
Parents: 173dce7
Author: Reid Chan 
Authored: Mon Aug 14 01:01:30 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Aug 14 01:02:39 2017 +0800

--
 .../org/apache/hadoop/hbase/ClusterStatus.java  | 297 ++-
 .../org/apache/hadoop/hbase/client/Admin.java   |   8 +
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |   6 +
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|   8 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  12 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |   9 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |  81 -
 .../hbase/shaded/protobuf/RequestConverter.java |  14 +-
 .../src/main/protobuf/ClusterStatus.proto   |  12 +
 .../src/main/protobuf/Master.proto  |   1 +
 .../src/main/protobuf/ClusterStatus.proto   |  12 +
 .../hbase/master/ClusterStatusPublisher.java|  18 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |  62 ++--
 .../hadoop/hbase/master/MasterRpcServices.java  |   3 +-
 .../hbase/client/TestClientClusterStatus.java   | 221 ++
 15 files changed, 710 insertions(+), 54 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/923195c3/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index 95d77a2..0dc4984 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -47,6 +47,28 @@ import org.apache.hadoop.io.VersionedWritable;
  * Regions in transition at master
  * The unique cluster ID
  * 
+ * {@link Options} provides a way to filter out unwanted information.
+ * The following code retrieves all the cluster information.
+ * 
+ * {@code
+ * // Original version still works
+ * Admin admin = connection.getAdmin();
+ * ClusterStatus status = admin.getClusterStatus();
+ * // or below, a new version which has the same effects
+ * ClusterStatus status = admin.getClusterStatus(Options.defaultOptions());
+ * }
+ * 
+ * If information about dead servers and master coprocessors is not wanted,
+ * it can be excluded in the following way:
+ * 
+ * {@code
+ * Admin admin = connection.getAdmin();
+ * ClusterStatus status = admin.getClusterStatus(
+ *Options.defaultOptions()
+ *   .excludeDeadServers()
+ *   .excludeMasterCoprocessors());
+ * }
+ * 
  */
 @InterfaceAudience.Public
 public class ClusterStatus extends VersionedWritable {
@@ -72,6 +94,12 @@ public class ClusterStatus extends VersionedWritable {
   private String[] masterCoprocessors;
   private Boolean balancerOn;
 
+  /**
+   * Use {@link ClusterStatus.Builder} to construct a ClusterStatus instead.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   * (HBASE-15511: https://issues.apache.org/jira/browse/HBASE-15511).
+   */
+  @Deprecated
   public ClusterStatus(final String hbaseVersion, final String clusterid,
   final Map servers,
   final Collection deadServers,
@@ -80,8 +108,8 @@ public class ClusterStatus extends VersionedWritable {
   final List rit,
   final String[] masterCoprocessors,
   final Boolean balancerOn) {
+// TODO: make this constructor private
 this.hbaseVersion = hbaseVersion;
-
 this.liveServers = servers;
 this.deadServers = deadServers;
 this.master = master;
@@ -133,7 +161,8 @@ public class ClusterStatus extends VersionedWritable {
*/
   public double getAverageLoad() {
 int load = getRegionsCount();
-return (double)load / (double)getServersSize();
+int serverSize = getServersSize();
+return serverSize != 0 ? (double)load / (double)serverSize : 0.0;
   }
 
   /**
@@ -333,4 +362,268 @@ public class ClusterStatus extends VersionedWritable {
 }
 return sb.toString();
   }
+
+  public static Builder newBuilder() {
+return new Builder();
+  }
+
+  /**
+   * Builder for construct a ClusterStatus.
+   */
+  public static class Builder {
+private String hbaseVersion = null;
+private Map liveServers = null;
+private Collection deadServers = null;
+private ServerName master = null;
+priv

[33/50] [abbrv] hbase git commit: HBASE-18553 Expose scan cursor for asynchronous scanner

2017-08-20 Thread busbey
HBASE-18553 Expose scan cursor for asynchronous scanner


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4c74a73d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4c74a73d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4c74a73d

Branch: refs/heads/HBASE-18467
Commit: 4c74a73d57e09fd2c0ecde862a196c28dc6cd219
Parents: 2a9cdd5
Author: zhangduo 
Authored: Tue Aug 15 17:15:06 2017 +0800
Committer: zhangduo 
Committed: Wed Aug 16 21:04:57 2017 +0800

--
 .../AsyncScanSingleRegionRpcRetryingCaller.java |  35 -
 .../hbase/client/AsyncTableResultScanner.java   |  20 ++-
 .../hbase/client/RawScanResultConsumer.java |  11 +-
 .../client/AbstractTestResultScannerCursor.java |  89 +++
 .../client/TestAsyncResultScannerCursor.java|  49 ++
 .../hbase/client/TestRawAsyncScanCursor.java| 157 +--
 .../hbase/client/TestResultScannerCursor.java   |  34 
 .../hadoop/hbase/client/TestScanCursor.java |  90 ---
 8 files changed, 330 insertions(+), 155 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4c74a73d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
index 02a4357..d16cb8b 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
@@ -157,10 +158,9 @@ class AsyncScanSingleRegionRpcRetryingCaller {
 
 private ScanResumerImpl resumer;
 
-public ScanControllerImpl(ScanResponse resp) {
-  callerThread = Thread.currentThread();
-  cursor = resp.hasCursor() ? 
Optional.of(ProtobufUtil.toCursor(resp.getCursor()))
-  : Optional.empty();
+public ScanControllerImpl(Optional cursor) {
+  this.callerThread = Thread.currentThread();
+  this.cursor = cursor;
 }
 
 private void preCheck() {
@@ -476,10 +476,11 @@ class AsyncScanSingleRegionRpcRetryingCaller {
 }
 updateServerSideMetrics(scanMetrics, resp);
 boolean isHeartbeatMessage = resp.hasHeartbeatMessage() && 
resp.getHeartbeatMessage();
+Result[] rawResults;
 Result[] results;
 int numberOfCompleteRowsBefore = resultCache.numberOfCompleteRows();
 try {
-  Result[] rawResults = 
ResponseConverter.getResults(controller.cellScanner(), resp);
+  rawResults = ResponseConverter.getResults(controller.cellScanner(), 
resp);
   updateResultsMetrics(scanMetrics, rawResults, isHeartbeatMessage);
   results = resultCache.addAndGet(
 
Optional.ofNullable(rawResults).orElse(ScanResultCache.EMPTY_RESULT_ARRAY),
@@ -493,12 +494,30 @@ class AsyncScanSingleRegionRpcRetryingCaller {
   return;
 }
 
-ScanControllerImpl scanController = new ScanControllerImpl(resp);
+ScanControllerImpl scanController;
 if (results.length > 0) {
+  scanController = new ScanControllerImpl(
+  resp.hasCursor() ? 
Optional.of(ProtobufUtil.toCursor(resp.getCursor()))
+  : Optional.empty());
   updateNextStartRowWhenError(results[results.length - 1]);
   consumer.onNext(results, scanController);
-} else if (resp.hasHeartbeatMessage() && resp.getHeartbeatMessage()) {
-  consumer.onHeartbeat(scanController);
+} else {
+  Optional cursor = Optional.empty();
+  if (resp.hasCursor()) {
+cursor = Optional.of(ProtobufUtil.toCursor(resp.getCursor()));
+  } else if (scan.isNeedCursorResult() && rawResults.length > 0) {
+// The size limit was exceeded and we need to return the last Result's row.
+// When the user sets a batch and the scanner is reopened, the server may return Results
+// the user has already seen, and the last Result can not be seen because the count is
+// not enough. So the row keys of the results may not be the same; we must use the last one.
+cursor = Optional.of(new Cursor(rawResults[rawResults.length - 
1].getRow()));
+  }
+  scanController = new ScanControllerImpl(cursor);
+  if (isHeartbeatMessage || cursor.isPresent()) {
+// only call onHeartbeat if server tells us explicitly this is a 
heartbeat message, or we
+// want to pass a c

[38/50] [abbrv] hbase git commit: HBASE-18125 shell disregards spaces at the end of a split key in a split file

2017-08-20 Thread busbey
HBASE-18125 shell disregards spaces at the end of a split key in a split file

Signed-off-by: fchenxi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a17ed035
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a17ed035
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a17ed035

Branch: refs/heads/HBASE-18467
Commit: a17ed0356f12c6f7a682557d92cc401b7a4297f1
Parents: 4c3a64d
Author: Chenxi Tong 
Authored: Wed Aug 9 12:00:53 2017 +0800
Committer: Michael Stack 
Committed: Wed Aug 16 15:14:12 2017 -0700

--
 hbase-shell/src/main/ruby/hbase/admin.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a17ed035/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 4b0de5f..2aacd7f 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -386,7 +386,7 @@ module Hbase
   end
   arg[SPLITS] = []
   File.foreach(splits_file) do |line|
-arg[SPLITS].push(line.strip)
+arg[SPLITS].push(line.chomp)
   end
   htd.setValue(SPLITS_FILE, arg[SPLITS_FILE])
 end



[37/50] [abbrv] hbase git commit: HBASE-18573 Update Append and Delete to use Mutation#getCellList(family)

2017-08-20 Thread busbey
HBASE-18573 Update Append and Delete to use Mutation#getCellList(family)

Signed-off-by: Jerry He 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4c3a64db
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4c3a64db
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4c3a64db

Branch: refs/heads/HBASE-18467
Commit: 4c3a64db13b086ad3d8a6ffa1be8ba2f5a24719c
Parents: 5d2c3dd
Author: Xiang Li 
Authored: Thu Aug 17 00:39:35 2017 +0800
Committer: Jerry He 
Committed: Wed Aug 16 14:50:46 2017 -0700

--
 .../org/apache/hadoop/hbase/client/Append.java  |  9 +++---
 .../org/apache/hadoop/hbase/client/Delete.java  | 31 
 2 files changed, 10 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4c3a64db/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 2bd0860..6947313 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -134,11 +134,10 @@ public class Append extends Mutation {
   public Append add(final Cell cell) {
 // Presume it is KeyValue for now.
 byte [] family = CellUtil.cloneFamily(cell);
-List list = this.familyMap.get(family);
-if (list == null) {
-  list  = new ArrayList<>(1);
-  this.familyMap.put(family, list);
-}
+
+// Get cell list for the family
+List list = getCellList(family);
+
 // find where the new entry should be placed in the List
 list.add(cell);
 return this;

http://git-wip-us.apache.org/repos/asf/hbase/blob/4c3a64db/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
index bf5241c..66b6cfc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
@@ -180,11 +180,7 @@ public class Delete extends Mutation implements 
Comparable {
 " doesn't match the original one " +  Bytes.toStringBinary(this.row));
 }
 byte [] family = CellUtil.cloneFamily(kv);
-List list = familyMap.get(family);
-if (list == null) {
-  list = new ArrayList<>(1);
-  familyMap.put(family, list);
-}
+List list = getCellList(family);
 list.add(kv);
 return this;
   }
@@ -216,11 +212,8 @@ public class Delete extends Mutation implements 
Comparable {
 if (timestamp < 0) {
   throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + 
timestamp);
 }
-List list = familyMap.get(family);
-if(list == null) {
-  list = new ArrayList<>(1);
-  familyMap.put(family, list);
-} else if(!list.isEmpty()) {
+List list = getCellList(family);
+if(!list.isEmpty()) {
   list.clear();
 }
 KeyValue kv = new KeyValue(row, family, null, timestamp, 
KeyValue.Type.DeleteFamily);
@@ -236,11 +229,7 @@ public class Delete extends Mutation implements 
Comparable {
* @return this for invocation chaining
*/
   public Delete addFamilyVersion(final byte [] family, final long timestamp) {
-List list = familyMap.get(family);
-if(list == null) {
-  list = new ArrayList<>(1);
-  familyMap.put(family, list);
-}
+List list = getCellList(family);
 list.add(new KeyValue(row, family, null, timestamp,
   KeyValue.Type.DeleteFamilyVersion));
 return this;
@@ -269,11 +258,7 @@ public class Delete extends Mutation implements 
Comparable {
 if (timestamp < 0) {
   throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + 
timestamp);
 }
-List list = familyMap.get(family);
-if (list == null) {
-  list = new ArrayList<>(1);
-  familyMap.put(family, list);
-}
+List list = getCellList(family);
 list.add(new KeyValue(this.row, family, qualifier, timestamp,
 KeyValue.Type.DeleteColumn));
 return this;
@@ -304,11 +289,7 @@ public class Delete extends Mutation implements 
Comparable {
 if (timestamp < 0) {
   throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + 
timestamp);
 }
-List list = familyMap.get(family);
-if(list == null) {
-  list = new ArrayList<>(1);
-  familyMap.put(family, list);
-}
+List list = getCellList(family);
 KeyValue kv = new KeyValue(this.row, family, qualifier, times

[17/50] [abbrv] hbase git commit: HBASE-18238 rubocop autocorrect for bin/

2017-08-20 Thread busbey
HBASE-18238 rubocop autocorrect for bin/


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ea8fa59a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ea8fa59a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ea8fa59a

Branch: refs/heads/HBASE-18467
Commit: ea8fa59a4c2fe7633ebe70df622098bfe36b5df9
Parents: 096dac2
Author: Mike Drob 
Authored: Wed Jul 19 12:05:26 2017 -0500
Committer: Mike Drob 
Committed: Mon Aug 14 13:53:37 2017 -0500

--
 bin/draining_servers.rb | 108 +++
 bin/get-active-master.rb|   6 +-
 bin/hirb.rb |  46 +++--
 bin/region_mover.rb |   2 +-
 bin/region_status.rb|  50 +++---
 bin/replication/copy_tables_desc.rb |  47 +++---
 bin/shutdown_regionserver.rb|  16 ++---
 7 files changed, 132 insertions(+), 143 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ea8fa59a/bin/draining_servers.rb
--
diff --git a/bin/draining_servers.rb b/bin/draining_servers.rb
index 8e1b250..ea74c30 100644
--- a/bin/draining_servers.rb
+++ b/bin/draining_servers.rb
@@ -16,7 +16,7 @@
 # limitations under the License.
 #
 
-# Add or remove servers from draining mode via zookeeper 
+# Add or remove servers from draining mode via zookeeper
 
 require 'optparse'
 include Java
@@ -29,13 +29,13 @@ java_import org.apache.commons.logging.Log
 java_import org.apache.commons.logging.LogFactory
 
 # Name of this script
-NAME = "draining_servers"
+NAME = 'draining_servers'.freeze
 
 # Do command-line parsing
 options = {}
 optparse = OptionParser.new do |opts|
   opts.banner = "Usage: ./hbase org.jruby.Main #{NAME}.rb [options] 
add|remove|list || ..."
-  opts.separator 'Add remove or list servers in draining mode. Can accept 
either hostname to drain all region servers' +
+  opts.separator 'Add remove or list servers in draining mode. Can accept 
either hostname to drain all region servers' \
  'in that host, a host:port pair or a host,port,startCode 
triplet. More than one server can be given separated by space'
   opts.on('-h', '--help', 'Display usage information') do
 puts opts
@@ -51,117 +51,117 @@ optparse.parse!
 # Return array of servernames where servername is hostname+port+startcode
 # comma-delimited
 def getServers(admin)
-  serverInfos = admin.getClusterStatus().getServers()
+  serverInfos = admin.getClusterStatus.getServers
   servers = []
   for server in serverInfos
-servers << server.getServerName()
+servers << server.getServerName
   end
-  return servers
+  servers
 end
 
 def getServerNames(hostOrServers, config)
   ret = []
   connection = ConnectionFactory.createConnection(config)
-  
+
   for hostOrServer in hostOrServers
 # check whether it is already serverName. No need to connect to cluster
 parts = hostOrServer.split(',')
-if parts.size() == 3
+if parts.size == 3
   ret << hostOrServer
-else 
-  admin = connection.getAdmin() if not admin
+else
+  admin = connection.getAdmin unless admin
   servers = getServers(admin)
 
-  hostOrServer = hostOrServer.gsub(/:/, ",")
-  for server in servers 
+  hostOrServer = hostOrServer.tr(':', ',')
+  for server in servers
 ret << server if server.start_with?(hostOrServer)
   end
 end
   end
-  
-  admin.close() if admin
-  connection.close()
-  return ret
+
+  admin.close if admin
+  connection.close
+  ret
 end
 
-def addServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
+def addServers(_options, hostOrServers)
+  config = HBaseConfiguration.create
   servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
"draining_servers", nil)
+
+  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
'draining_servers', nil)
   parentZnode = zkw.znodePaths.drainingZNode
-  
+
   begin
 for server in servers
   node = ZKUtil.joinZNode(parentZnode, server)
   ZKUtil.createAndFailSilent(zkw, node)
 end
   ensure
-zkw.close()
+zkw.close
   end
 end
 
-def removeServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
+def removeServers(_options, hostOrServers)
+  config = HBaseConfiguration.create
   servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
"draining_servers", nil)
+
+  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, 
'draining_servers', nil)
   parentZnode = zkw.znodePaths.drainingZNode
-  
+
   begin
 for server in servers
   node = ZKUtil.joinZNode(parentZnode, server)
   ZKUtil.dele

[36/50] [abbrv] hbase git commit: HBASE-18271 Shade netty Purge mention of netty-all; ADDENDUM for sparktest

2017-08-20 Thread busbey
HBASE-18271 Shade netty Purge mention of netty-all; ADDENDUM for sparktest


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5d2c3ddf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5d2c3ddf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5d2c3ddf

Branch: refs/heads/HBASE-18467
Commit: 5d2c3ddf5238fa0b48b8aa4fb99108eaf69f16aa
Parents: 4734467
Author: Michael Stack 
Authored: Wed Aug 16 10:04:33 2017 -0700
Committer: Michael Stack 
Committed: Wed Aug 16 10:04:56 2017 -0700

--
 hbase-spark/pom.xml | 8 +---
 1 file changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5d2c3ddf/hbase-spark/pom.xml
--
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
index 8137b53..d294835 100644
--- a/hbase-spark/pom.xml
+++ b/hbase-spark/pom.xml
@@ -559,9 +559,11 @@
   test
 
 
-  
--Xmx1536m -XX:ReservedCodeCacheSize=512m
-
+  
+
org.apache.hadoop.hbase.shaded.
+  
+   -Xmx1536m -XX:ReservedCodeCacheSize=512m
+  
   false
 
   



[15/50] [abbrv] hbase git commit: HBASE-18593 Tell m2eclipse what to do w/ replacer plugin

2017-08-20 Thread busbey
HBASE-18593 Tell m2eclipse what to do w/ replacer plugin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bd400730
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bd400730
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bd400730

Branch: refs/heads/HBASE-18467
Commit: bd40073094b248f74ac9a3c0fff7ef6668265feb
Parents: 424dff2
Author: Michael Stack 
Authored: Mon Aug 14 09:13:27 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:13:27 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bd400730/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index b28c03e..4c72eca 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,6 +192,23 @@
 
   
 
+
+  
+
+  com.google.code.maven-replacer-plugin
+
+replacer
+[1.5.3,)
+
+  replace
+
+  
+  
+
+ false
+
+  
+
   
 
   



[27/50] [abbrv] hbase git commit: HBASE-18587 Fix flaky TestFileIOEngine

2017-08-20 Thread busbey
HBASE-18587 Fix flaky TestFileIOEngine

This short circuits reads and writes with 0 length and also removes flakiness 
in TestFileIOEngine

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5280c100
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5280c100
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5280c100

Branch: refs/heads/HBASE-18467
Commit: 5280c100ff93f65cd568ce830e088cc12a2f5585
Parents: 2b88edf
Author: Zach York 
Authored: Thu Aug 10 16:55:28 2017 -0700
Committer: Michael Stack 
Committed: Tue Aug 15 14:57:10 2017 -0700

--
 .../hbase/io/hfile/bucket/FileIOEngine.java |  23 ++--
 .../hbase/io/hfile/bucket/TestFileIOEngine.java | 123 +++
 2 files changed, 88 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5280c100/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index a847bfe..ab77696 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
 import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -122,15 +123,18 @@ public class FileIOEngine implements IOEngine {
   @Override
   public Cacheable read(long offset, int length, 
CacheableDeserializer deserializer)
   throws IOException {
+Preconditions.checkArgument(length >= 0, "Length of read can not be less 
than 0.");
 ByteBuffer dstBuffer = ByteBuffer.allocate(length);
-accessFile(readAccessor, dstBuffer, offset);
-// The buffer created out of the fileChannel is formed by copying the data 
from the file
-// Hence in this case there is no shared memory that we point to. Even if 
the BucketCache evicts
-// this buffer from the file the data is already copied and there is no 
need to ensure that
-// the results are not corrupted before consuming them.
-if (dstBuffer.limit() != length) {
-  throw new RuntimeException("Only " + dstBuffer.limit() + " bytes read, " 
+ length
-  + " expected");
+if (length != 0) {
+  accessFile(readAccessor, dstBuffer, offset);
+  // The buffer created out of the fileChannel is formed by copying the 
data from the file
+  // Hence in this case there is no shared memory that we point to. Even 
if the BucketCache evicts
+  // this buffer from the file the data is already copied and there is no 
need to ensure that
+  // the results are not corrupted before consuming them.
+  if (dstBuffer.limit() != length) {
+throw new RuntimeException("Only " + dstBuffer.limit() + " bytes read, 
" + length
++ " expected");
+  }
 }
 return deserializer.deserialize(new SingleByteBuff(dstBuffer), true, 
MemoryType.EXCLUSIVE);
   }
@@ -143,6 +147,9 @@ public class FileIOEngine implements IOEngine {
*/
   @Override
   public void write(ByteBuffer srcBuffer, long offset) throws IOException {
+if (!srcBuffer.hasRemaining()) {
+  return;
+}
 accessFile(writeAccessor, srcBuffer, offset);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/5280c100/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index d13022d..4451c0c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.File;
@@ -30,6 +31,8 @@ import 
org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.BufferGrab
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import 

[45/50] [abbrv] hbase git commit: HBASE-18572 Delete can't remove the cells which have no visibility label

2017-08-20 Thread busbey
HBASE-18572 Delete can't remove the cells which have no visibility label


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e9bafeb0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e9bafeb0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e9bafeb0

Branch: refs/heads/HBASE-18467
Commit: e9bafeb091549cce35f25a56688b6632578de74b
Parents: e2532ec
Author: Chia-Ping Tsai 
Authored: Sat Aug 19 01:55:45 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Aug 19 01:55:45 2017 +0800

--
 .../DefaultVisibilityLabelServiceImpl.java  |  12 +-
 .../visibility/VisibilityController.java|   4 +
 .../visibility/VisibilityScanDeleteTracker.java |  19 +-
 .../ExpAsStringVisibilityLabelServiceImpl.java  |   4 +
 .../TestVisibilityLabelsWithDeletes.java| 191 +++
 5 files changed, 222 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e9bafeb0/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
index d4a5627..672da8a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
@@ -551,12 +551,16 @@ public class DefaultVisibilityLabelServiceImpl implements 
VisibilityLabelService
   @Override
   public boolean matchVisibility(List putVisTags, Byte putTagsFormat, 
List deleteVisTags,
   Byte deleteTagsFormat) throws IOException {
+// Early out if there are no tags on either the cell or the delete
+if (putVisTags.isEmpty() && deleteVisTags.isEmpty()) {
+  return true;
+}
+// Early out if only one side (cell or delete) has visibility tags
+if (putVisTags.isEmpty() ^ deleteVisTags.isEmpty()) {
+  return false;
+}
 if ((deleteTagsFormat != null && deleteTagsFormat == 
SORTED_ORDINAL_SERIALIZATION_FORMAT)
 && (putTagsFormat == null || putTagsFormat == 
SORTED_ORDINAL_SERIALIZATION_FORMAT)) {
-  if (putVisTags.isEmpty()) {
-// Early out if there are no tags in the cell
-return false;
-  }
   if (putTagsFormat == null) {
 return matchUnSortedVisibilityTags(putVisTags, deleteVisTags);
   } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e9bafeb0/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index 23f0583..130587a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -1074,6 +1074,10 @@ public class VisibilityController implements 
MasterObserver, RegionObserver,
 public ReturnCode filterKeyValue(Cell cell) throws IOException {
   List putVisTags = new ArrayList<>();
   Byte putCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(cell, 
putVisTags);
+  if (putVisTags.isEmpty() && deleteCellVisTags.isEmpty()) {
+// Early out if there are no tags in the cell and none in the delete
+return ReturnCode.INCLUDE;
+  }
   boolean matchFound = VisibilityLabelServiceManager
   .getInstance().getVisibilityLabelService()
   .matchVisibility(putVisTags, putCellVisTagsFormat, deleteCellVisTags,

http://git-wip-us.apache.org/repos/asf/hbase/blob/e9bafeb0/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
index 67181e1..f62e8d0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.visibility;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util
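
The heart of the fix is the pair of early-outs added to matchVisibility above: a delete carrying no visibility tags now matches a cell carrying no visibility tags, while tags on only one side mean the delete does not apply. A stand-alone, hedged restatement of that decision table (names are illustrative; the real method goes on to compare the tag lists when both sides carry labels):

// Returns TRUE/FALSE when the early-out decides, or null to fall through to the
// full (sorted or unsorted) visibility tag comparison.
static Boolean earlyOut(boolean cellTagsEmpty, boolean deleteTagsEmpty) {
  if (cellTagsEmpty && deleteTagsEmpty) {
    return Boolean.TRUE;          // neither side has labels: the delete applies
  }
  if (cellTagsEmpty ^ deleteTagsEmpty) {
    return Boolean.FALSE;         // labels on only one side: the delete does not apply
  }
  return null;                    // both sides have labels: compare them properly
}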

[18/50] [abbrv] hbase git commit: HBASE-18303 Clean up @Parameter boilerplate

2017-08-20 Thread busbey
HBASE-18303 Clean up @Parameter boilerplate


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b26ccda
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b26ccda
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b26ccda

Branch: refs/heads/HBASE-18467
Commit: 0b26ccdaa1b8700e7958aeebbaf9cad81e737dd0
Parents: ea8fa59
Author: Mike Drob 
Authored: Fri Jun 30 12:13:56 2017 -0500
Committer: Mike Drob 
Committed: Mon Aug 14 14:23:24 2017 -0500

--
 .../hadoop/hbase/filter/TestKeyOnlyFilter.java  | 15 -
 .../hadoop/hbase/HBaseCommonTestingUtility.java | 22 
 .../apache/hadoop/hbase/types/TestStruct.java   | 18 +++-
 .../hadoop/hbase/util/TestByteBufferUtils.java  |  8 ++-
 .../hbase/codec/keyvalue/TestKeyValueTool.java  | 16 +++---
 .../codec/prefixtree/row/TestRowEncoder.java|  6 +-
 .../hadoop/hbase/rest/TestMultiRowResource.java |  5 +
 .../hadoop/hbase/HBaseTestingUtility.java   | 20 +-
 .../encoding/TestSeekToBlockWithEncoders.java   |  4 ++--
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java |  3 ++-
 .../apache/hadoop/hbase/io/hfile/TestHFile.java |  3 ++-
 .../hbase/io/hfile/TestHFileBlockIndex.java |  3 ++-
 .../hbase/io/hfile/TestHFileWriterV3.java   |  3 ++-
 .../hbase/util/TestCoprocessorScanPolicy.java   |  3 ++-
 14 files changed, 59 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b26ccda/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
index 33e3cd9..f957b59 100644
--- 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
+++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
@@ -26,6 +26,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.ByteBufferKeyValue;
 
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -44,20 +45,12 @@ import org.junit.runners.Parameterized.Parameters;
 @RunWith(Parameterized.class)
 public class TestKeyOnlyFilter {
 
-  private final boolean lenAsVal;
+  @Parameterized.Parameter
+  public boolean lenAsVal;
 
   @Parameters
   public static Collection parameters() {
-List paramList = new ArrayList<>(2);
-{
-  paramList.add(new Object[] { false });
-  paramList.add(new Object[] { true });
-}
-return paramList;
-  }
-
-  public TestKeyOnlyFilter(boolean lenAsVal) {
-this.lenAsVal = lenAsVal;
+return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b26ccda/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index e191046..1790f4a 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 import java.util.UUID;
 
 import org.apache.commons.io.FileUtils;
@@ -28,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.io.compress.Compression;
 
 /**
  * Common helpers for testing HBase that do not depend on specific server/etc. 
things.
@@ -37,6 +40,25 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 public class HBaseCommonTestingUtility {
   protected static final Log LOG = 
LogFactory.getLog(HBaseCommonTestingUtility.class);
 
+  /** Compression algorithms to use in parameterized JUnit 4 tests */
+  public static final List COMPRESSION_ALGORITHMS_PARAMETERIZED =
+Arrays.asList(new Object[][] {
+  { Compression.Algorithm.NONE },
+  { Compression.Algorithm.GZ }
+});
+
+  /** This is for unit tests parameterized with a two booleans. */
+  public static final List BOOLEAN_PARAMETERIZED =
+  Arrays.asList(new Object[][] {
+  {f
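
The pattern after this cleanup is plain JUnit 4 field injection: a public field annotated with @Parameterized.Parameter replaces the constructor-plus-final-field boilerplate, and shared lists such as BOOLEAN_PARAMETERIZED feed the @Parameters method. A minimal, self-contained sketch of the resulting shape (class and test names are illustrative):

import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class ExampleBooleanParameterizedTest {
  // Injected by the Parameterized runner; must be public, no constructor needed.
  @Parameterized.Parameter
  public boolean lenAsVal;

  @Parameters
  public static List<Object[]> parameters() {
    // Stands in for HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED.
    return Arrays.asList(new Object[][] { { false }, { true } });
  }

  @Test
  public void runsOncePerValue() {
    System.out.println("lenAsVal=" + lenAsVal);   // runs once with false, once with true
  }
}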

[11/50] [abbrv] hbase git commit: HBASE-18566 [RSGROUP]Log the client IP/port of the rsgroup admin

2017-08-20 Thread busbey
HBASE-18566 [RSGROUP]Log the client IP/port of the rsgroup admin

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c6bf4d51
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c6bf4d51
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c6bf4d51

Branch: refs/heads/HBASE-18467
Commit: c6bf4d51928985517c438d1efcaefdeddc9ac5ef
Parents: 05e6e56
Author: Guangxu Cheng 
Authored: Mon Aug 14 10:44:52 2017 +0800
Committer: tedyu 
Committed: Sun Aug 13 20:29:12 2017 -0700

--
 .../hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java   | 15 +++
 .../java/org/apache/hadoop/hbase/master/HMaster.java |  2 +-
 .../apache/hadoop/hbase/master/MasterServices.java   |  2 ++
 .../hadoop/hbase/master/MockNoopMasterServices.java  |  5 +
 4 files changed, 23 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c6bf4d51/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index 9fda3f0..0bc5c76 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -120,6 +120,8 @@ public class RSGroupAdminEndpoint implements 
MasterObserver, CoprocessorService
 GetRSGroupInfoRequest request, RpcCallback 
done) {
   GetRSGroupInfoResponse.Builder builder = 
GetRSGroupInfoResponse.newBuilder();
   String groupName = request.getRSGroupName();
+  LOG.info(master.getClientIdAuditPrefix() + " initiates rsgroup info 
retrieval, group="
+  + groupName);
   try {
 RSGroupInfo rsGroupInfo = groupAdminServer.getRSGroupInfo(groupName);
 if (rsGroupInfo != null) {
@@ -137,6 +139,8 @@ public class RSGroupAdminEndpoint implements 
MasterObserver, CoprocessorService
   GetRSGroupInfoOfTableResponse.Builder builder = 
GetRSGroupInfoOfTableResponse.newBuilder();
   try {
 TableName tableName = ProtobufUtil.toTableName(request.getTableName());
+LOG.info(master.getClientIdAuditPrefix() + " initiates rsgroup info 
retrieval, table="
++ tableName);
 RSGroupInfo RSGroupInfo = 
groupAdminServer.getRSGroupInfoOfTable(tableName);
 if (RSGroupInfo != null) {
   
builder.setRSGroupInfo(RSGroupProtobufUtil.toProtoGroupInfo(RSGroupInfo));
@@ -156,6 +160,8 @@ public class RSGroupAdminEndpoint implements 
MasterObserver, CoprocessorService
 for (HBaseProtos.ServerName el : request.getServersList()) {
   hostPorts.add(Address.fromParts(el.getHostName(), el.getPort()));
 }
+LOG.info(master.getClientIdAuditPrefix() + " move servers " + 
hostPorts +" to rsgroup "
++ request.getTargetGroup());
 groupAdminServer.moveServers(hostPorts, request.getTargetGroup());
   } catch (IOException e) {
 CoprocessorRpcUtils.setControllerException(controller, e);
@@ -172,6 +178,8 @@ public class RSGroupAdminEndpoint implements 
MasterObserver, CoprocessorService
 for (HBaseProtos.TableName tableName : request.getTableNameList()) {
   tables.add(ProtobufUtil.toTableName(tableName));
 }
+LOG.info(master.getClientIdAuditPrefix() + " move tables " + tables +" 
to rsgroup "
++ request.getTargetGroup());
 groupAdminServer.moveTables(tables, request.getTargetGroup());
   } catch (IOException e) {
 CoprocessorRpcUtils.setControllerException(controller, e);
@@ -183,6 +191,7 @@ public class RSGroupAdminEndpoint implements 
MasterObserver, CoprocessorService
 public void addRSGroup(RpcController controller, AddRSGroupRequest request,
 RpcCallback done) {
   AddRSGroupResponse.Builder builder = AddRSGroupResponse.newBuilder();
+  LOG.info(master.getClientIdAuditPrefix() + " add rsgroup " + 
request.getRSGroupName());
   try {
 groupAdminServer.addRSGroup(request.getRSGroupName());
   } catch (IOException e) {
@@ -196,6 +205,7 @@ public class RSGroupAdminEndpoint implements 
MasterObserver, CoprocessorService
 RemoveRSGroupRequest request, RpcCallback done) 
{
   RemoveRSGroupResponse.Builder builder =
   RemoveRSGroupResponse.newBuilder();
+  LOG.info(master.getClientIdAuditPrefix() + " remove rsgroup " + 
request.getRSGroupName());
   try {
 groupAdminServer.removeRSGroup(request.getRSGroupName());
   } catch (IOException e) {
@@ -208,6 +218,7 @@ public class RSGroupAdminEndpoint imple

[14/50] [abbrv] hbase git commit: Revert "HBASE-18588 Verify we're using netty .so epolling on linux post HBASE-18271" References the wrong JIRA

2017-08-20 Thread busbey
Revert "HBASE-18588 Verify we're using netty .so epolling on linux post 
HBASE-18271"
References the wrong JIRA

This reverts commit ddbaf56ca8c712dc44608d3323280f578c56aed2.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/424dff20
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/424dff20
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/424dff20

Branch: refs/heads/HBASE-18467
Commit: 424dff20607577901c06cb40b1293ea5051ec5c5
Parents: ddbaf56
Author: Michael Stack 
Authored: Mon Aug 14 09:12:51 2017 -0700
Committer: Michael Stack 
Committed: Mon Aug 14 09:12:51 2017 -0700

--
 hbase-protocol-shaded/pom.xml | 17 -
 1 file changed, 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/424dff20/hbase-protocol-shaded/pom.xml
--
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index 4c72eca..b28c03e 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -192,23 +192,6 @@
 
   
 
-
-  
-
-  com.google.code.maven-replacer-plugin
-
-replacer
-[1.5.3,)
-
-  replace
-
-  
-  
-
- false
-
-  
-
   
 
   



[50/50] [abbrv] hbase git commit: HBASE-18467 attempt at using jiraComment.

2017-08-20 Thread busbey
HBASE-18467 attempt at using jiraComment.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1abee706
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1abee706
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1abee706

Branch: refs/heads/HBASE-18467
Commit: 1abee706c3aaa7b50159305baa57d3a675b4b48a
Parents: c2537ac
Author: Sean Busbey 
Authored: Sun Aug 20 16:29:03 2017 -0500
Committer: Sean Busbey 
Committed: Sun Aug 20 16:29:03 2017 -0500

--
 dev-support/Jenkinsfile | 24 +---
 1 file changed, 17 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1abee706/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 7207ab3..6506adf 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -377,23 +377,33 @@ END
 comment += 'Committer, please check your recent inclusion of a 
patch for this issue.\\'
  }
  comment += "  [build ${currentBuild.displayName} on 
builds.a.o|${env.BUILD_URL}]: ${currentBuild.result}\\\\details (if 
available):"
- echo "Comment so far: "
+ echo "[DEBUG] Comment so far: "
  echo comment
  echo ""
- echo "trying to aggregate step-wise results"
+ echo "[DEBUG] trying to aggregate step-wise results"
  comment += results.collect { fileExists(file: it) ? readFile(file: 
it) : "\n" }.join
+ echo "[INFO] Comment:"
  echo comment
- echo "${currentBuild.changeSets.size} changes"
+ echo ""
+ echo "[INFO] ${currentBuild.changeSets.size} changes"
+ def seenJiras = []
  for ( change in currentBuild.changeSets ) {
echo "change:"
echo "  ${change.getAuthor}"
echo "  ${change.getMsg}"
echo "  ${change.getMsgAnnotated}"
echo ""
-   // figure out jira  from the message
-   // dedup jiras we already commented on
-   // comment, something like
-   // jiraComment issueKey: currentIssue, body: comment
+   change.getMsg.findAll( /HBASE-[0-9]+/ ).each { currentIssue ->
+ echo "[DEBUG] found jira key: ${currentIssue}"
+ if ( currentIssue in seenJiras ) {
+   echo "[DEBUG] already commented on ${currentIssue}."
+ } else {
+   echo "[INFO] commenting on ${currentIssue}."
+   jiraComment issueKey: currentIssue, body: comment
+   seenJiras << currentIssue
+ }
+   }
+   //TODO warn if no JIRA key found in message, email committer
  }
   }
 }
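
The new loop extracts every HBASE-NNNN key from each change message, skips keys it has already commented on, and invokes the jiraComment step once per issue. The extraction-and-dedup idea, restated as a small hedged Java sketch (jiraComment itself is a Jenkins pipeline step and is only represented by a print statement here):

import java.util.LinkedHashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class JiraKeyScan {
  private static final Pattern JIRA_KEY = Pattern.compile("HBASE-[0-9]+");

  public static void main(String[] args) {
    String[] commitMessages = {
        "HBASE-18467 attempt at using jiraComment.",
        "Revert \"HBASE-18588 ...\" References the wrong JIRA",
        "HBASE-18467 follow-up" };
    Set<String> seenJiras = new LinkedHashSet<>();   // dedup, like seenJiras in the Jenkinsfile
    for (String msg : commitMessages) {
      Matcher m = JIRA_KEY.matcher(msg);
      while (m.find()) {
        if (seenJiras.add(m.group())) {
          // In the pipeline this is where `jiraComment issueKey: ..., body: comment` runs.
          System.out.println("would comment on " + m.group());
        }
      }
    }
  }
}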



[47/50] [abbrv] hbase git commit: HBASE-18592 [hbase-thirdparty] Doc on new hbase-thirdparty dependency for the refguide

2017-08-20 Thread busbey
HBASE-18592 [hbase-thirdparty] Doc on new hbase-thirdparty dependency for the 
refguide


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b932d38b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b932d38b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b932d38b

Branch: refs/heads/HBASE-18467
Commit: b932d38b2a53c1764345fec44e1bbb38addf04ec
Parents: 56f9e1a
Author: Michael Stack 
Authored: Fri Aug 18 16:18:42 2017 -0700
Committer: Michael Stack 
Committed: Fri Aug 18 16:18:42 2017 -0700

--
 src/main/asciidoc/_chapters/developer.adoc | 54 +
 1 file changed, 54 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b932d38b/src/main/asciidoc/_chapters/developer.adoc
--
diff --git a/src/main/asciidoc/_chapters/developer.adoc 
b/src/main/asciidoc/_chapters/developer.adoc
index 6a546fb..3d010b6 100644
--- a/src/main/asciidoc/_chapters/developer.adoc
+++ b/src/main/asciidoc/_chapters/developer.adoc
@@ -2116,6 +2116,60 @@ However any substantive discussion (as with any off-list 
project-related discuss
 
 Misspellings and/or bad grammar is preferable to the disruption a JIRA comment 
edit causes: See the discussion at 
link:http://search-hadoop.com/?q=%5BReopened%5D+%28HBASE-451%29+Remove+HTableDescriptor+from+HRegionInfo&fc_project=HBase[Re:(HBASE-451)
 Remove HTableDescriptor from HRegionInfo]
 
+[[thirdparty]]
+=== The hbase-thirdparty dependency and shading/relocation
+
+A new project was created for the release of hbase-2.0.0. It was called
+`hbase-thirdparty`. This project exists only to provide the main hbase
+project with relocated -- or shaded -- versions of popular thirdparty
+libraries such as guava, netty, and protobuf. The mainline HBase project
+relies on the relocated versions of these libraries gotten from hbase-thirdparty
+rather than on finding these classes in their usual locations. We do this so
+we can specify whatever version we wish. If we don't relocate, we must
+harmonize our version to match that which hadoop and/or spark uses.
+
+For developers, this means you need to be careful referring to classes from
+netty, guava, protobuf, gson, etc. (see the hbase-thirdparty pom.xml for what
+it provides). Devs must refer to the hbase-thirdparty provided classes. In
+practice, this is usually not an issue (though it can be a bit of a pain). You
+will have to hunt for the relocated version of your particular class. You'll
+find it by prepending the general relocation prefix of `org.apache.hadoop.hbase.shaded.`.
+For example, if you are looking for `com.google.protobuf.Message`, the relocated
+version used by HBase internals can be found at
+`org.apache.hadoop.hbase.shaded.com.google.protobuf.Message`.
+
+For a few thirdparty libs, like protobuf (see the protobuf chapter in this book
+for the why), your IDE may give you both options -- the `com.google.protobuf.*`
+and the `org.apache.hadoop.hbase.shaded.com.google.protobuf.*` -- because both
+classes are on your CLASSPATH. Unless you are doing the particular juggling
+required in Coprocessor Endpoint development (again see above cited protobuf
+chapter), you'll want to use the shaded version, always.
+
+Of note, the relocation of netty is particular. The netty folks have put in
+place a facility to aid relocation; it seems like shading netty is a popular project.
+One case of this requires the setting of a peculiar system property on the JVM
+so that classes out in the bundled shared library (.so) can be found in their
+relocated location. Here is the property that needs to be set:
+
+`-Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded.`
+
+(Note that the trailing '.' is required.) When starting hbase normally, or when running
+test suites, this property is set for you. If you are doing something out of the
+ordinary, such as starting hbase from your own context, you'll need to provide
+this property on platforms that favor the bundled .so. See the release notes on HBASE-18271
+for more. The complaint you see is something like the following:
+`Cause: java.lang.RuntimeException: Failed construction of Master: class 
org.apache.hadoop.hbase.master.HMasterorg.apache.hadoop.hbase.shaded.io.netty.channel.epoll.`
+
+The `hbase-thirdparty` project has a groupid of `org.apache.hbase.thirdparty`.
+As of this writing, it provides three jars; one for netty with an artifactid of
+`hbase-thirdparty-netty`, one for protobuf at `hbase-thirdparty-protobuf`, and then
+a jar for all else -- gson, guava -- at `hbase-thirdparty-miscellaneous`.
+
+The hbase-thirdparty artifacts are a product produced by the Apache HBase
+project under the aegis of the HBase Project Management Committee. Releases
+are done via
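
In code, the guidance above reduces to importing the relocated class name rather than the original one. A tiny hedged illustration using the example the text itself gives (the surrounding class is made up for the sake of a complete snippet):

// The relocated type provided via hbase-thirdparty:
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
// not: import com.google.protobuf.Message;  (the unrelocated class, which internals must avoid)

public class ShadedImportExample {
  public String describe(Message msg) {
    // Any Message handed around inside HBase internals is the shaded type.
    return msg.getClass().getName();
  }
}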

[29/50] [abbrv] hbase git commit: HBASE-18598 AsyncNonMetaRegionLocator use FIFO algorithm to get a candidate locate request

2017-08-20 Thread busbey
HBASE-18598 AsyncNonMetaRegionLocator use FIFO algorithm to get a candidate 
locate request


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59ffb611
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59ffb611
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59ffb611

Branch: refs/heads/HBASE-18467
Commit: 59ffb6119b2e4613bc8baec9a0738096184a3d92
Parents: 665fd0d
Author: Guanghao Zhang 
Authored: Tue Aug 15 16:15:29 2017 +0800
Committer: Guanghao Zhang 
Committed: Wed Aug 16 13:08:40 2017 +0800

--
 .../hbase/client/AsyncNonMetaRegionLocator.java | 119 ++-
 .../client/TestAsyncNonMetaRegionLocator.java   |   1 +
 2 files changed, 63 insertions(+), 57 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/59ffb611/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
index 31f369c..ab1f0db 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
@@ -29,18 +29,18 @@ import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ConcurrentNavigableMap;
 import java.util.concurrent.ConcurrentSkipListMap;
-import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -107,7 +107,7 @@ class AsyncNonMetaRegionLocator {
 public final Set pendingRequests = new HashSet<>();
 
 public final Map> 
allRequests =
-new HashMap<>();
+new LinkedHashMap<>();
 
 public boolean hasQuota(int max) {
   return pendingRequests.size() < max;
@@ -120,6 +120,49 @@ class AsyncNonMetaRegionLocator {
 public void send(LocateRequest req) {
   pendingRequests.add(req);
 }
+
+public Optional getCandidate() {
+  return allRequests.keySet().stream().filter(r -> 
!isPending(r)).findFirst();
+}
+
+public void clearCompletedRequests(Optional location) {
+  for (Iterator>> iter = allRequests
+  .entrySet().iterator(); iter.hasNext();) {
+Map.Entry> entry = 
iter.next();
+if (tryComplete(entry.getKey(), entry.getValue(), location)) {
+  iter.remove();
+}
+  }
+}
+
+private boolean tryComplete(LocateRequest req, 
CompletableFuture future,
+Optional location) {
+  if (future.isDone()) {
+return true;
+  }
+  if (!location.isPresent()) {
+return false;
+  }
+  HRegionLocation loc = location.get();
+  boolean completed;
+  if (req.locateType.equals(RegionLocateType.BEFORE)) {
+// for locating the row before current row, the common case is to find 
the previous region in
+// reverse scan, so we check the endKey first. In general, the 
condition should be startKey <
+// req.row and endKey >= req.row. Here we split it to endKey == 
req.row || (endKey > req.row
+// && startKey < req.row). The two conditions are equal since startKey 
< endKey.
+int c = Bytes.compareTo(loc.getRegionInfo().getEndKey(), req.row);
+completed =
+c == 0 || (c > 0 && 
Bytes.compareTo(loc.getRegionInfo().getStartKey(), req.row) < 0);
+  } else {
+completed = loc.getRegionInfo().containsRow(req.row);
+  }
+  if (completed) {
+future.complete(loc);
+return true;
+  } else {
+return false;
+  }
+}
   }
 
   AsyncNonMetaRegionLocator(AsyncConnectionImpl conn) {
@@ -186,48 +229,27 @@ class AsyncNonMetaRegionLocator {
 }
   }
 
-  private boolean tryComplete(LocateRequest req, 
CompletableFuture future,
-  HRegionLocation loc) {
-if (future.isDone()) {
-  return true;
-}
-boolean completed;
-if (req.locateType.equals(RegionLocateType.BEFORE)) {
-  // for locating the row before current row, the common case is to find 
the previous region in
-  // reverse scan, so we check the endKey first. In general, the condition 
should be startKey <
-  // req.row and endKey >= req
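
The switch from HashMap to LinkedHashMap for allRequests is what makes getCandidate() FIFO: LinkedHashMap iterates in insertion order, so the oldest un-sent locate request is found first, whereas HashMap iteration order is effectively arbitrary. A small, hedged demonstration of that property in isolation:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class InsertionOrderDemo {
  public static void main(String[] args) {
    Map<String, Integer> ordered = new LinkedHashMap<>();
    Map<String, Integer> unordered = new HashMap<>();
    for (String key : new String[] { "request-3", "request-1", "request-2" }) {
      ordered.put(key, 0);
      unordered.put(key, 0);
    }
    // LinkedHashMap returns keys in the order they were added, so a "find the first
    // non-pending entry" scan behaves like a FIFO queue of locate requests.
    System.out.println("LinkedHashMap: " + ordered.keySet());
    // HashMap gives no such guarantee; the candidate picked could be any entry.
    System.out.println("HashMap:       " + unordered.keySet());
  }
}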

[44/50] [abbrv] hbase git commit: HBASE-18617 FuzzyRowKeyFilter should not modify the filter pairs (vinisha)

2017-08-20 Thread busbey
HBASE-18617 FuzzyRowKeyFilter should not modify the filter pairs (vinisha)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e2532ecd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e2532ecd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e2532ecd

Branch: refs/heads/HBASE-18467
Commit: e2532ecd1e6093f9c56b16e2e8e6865bd0ad1cb7
Parents: 7fee03e
Author: tedyu 
Authored: Fri Aug 18 09:10:20 2017 -0700
Committer: tedyu 
Committed: Fri Aug 18 09:10:20 2017 -0700

--
 .../hadoop/hbase/filter/FuzzyRowFilter.java | 22 +++-
 .../filter/TestFuzzyRowFilterEndToEnd.java  | 27 +---
 2 files changed, 33 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e2532ecd/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 20c6656..2aa4857 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -76,23 +76,31 @@ public class FuzzyRowFilter extends FilterBase {
   private RowTracker tracker;
 
   public FuzzyRowFilter(List> fuzzyKeysData) {
-Pair p;
-for (int i = 0; i < fuzzyKeysData.size(); i++) {
-  p = fuzzyKeysData.get(i);
-  if (p.getFirst().length != p.getSecond().length) {
+List> fuzzyKeyDataCopy = new 
ArrayList<>(fuzzyKeysData.size());
+
+for (Pair aFuzzyKeysData : fuzzyKeysData) {
+  if (aFuzzyKeysData.getFirst().length != 
aFuzzyKeysData.getSecond().length) {
 Pair readable =
-new Pair<>(Bytes.toStringBinary(p.getFirst()), 
Bytes.toStringBinary(p
-.getSecond()));
+  new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), 
Bytes.toStringBinary(aFuzzyKeysData.getSecond()));
 throw new IllegalArgumentException("Fuzzy pair lengths do not match: " 
+ readable);
   }
+
+  Pair p = new Pair<>();
+  // create a copy of pair bytes so that they are not modified by the 
filter.
+  p.setFirst(Arrays.copyOf(aFuzzyKeysData.getFirst(), 
aFuzzyKeysData.getFirst().length));
+  p.setSecond(Arrays.copyOf(aFuzzyKeysData.getSecond(), 
aFuzzyKeysData.getSecond().length));
+
   // update mask ( 0 -> -1 (0xff), 1 -> 2)
   p.setSecond(preprocessMask(p.getSecond()));
   preprocessSearchKey(p);
+
+  fuzzyKeyDataCopy.add(p);
 }
-this.fuzzyKeysData = fuzzyKeysData;
+this.fuzzyKeysData = fuzzyKeyDataCopy;
 this.tracker = new RowTracker();
   }
 
+
   private void preprocessSearchKey(Pair p) {
 if (!UNSAFE_UNALIGNED) {
   // do nothing

http://git-wip-us.apache.org/repos/asf/hbase/blob/e2532ecd/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
index 686c2a2..b043e07 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
@@ -17,13 +17,6 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -42,6 +35,7 @@ import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.testclassification.FilterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -53,10 +47,16 @@ import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.junit.rules.TestName;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static or
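
The fix boils down to a defensive copy: each fuzzy key/mask pair is duplicated with Arrays.copyOf before the mask is preprocessed, so the caller's arrays are never mutated. The idiom in isolation, as a hedged sketch (the 0 -> 0xff, 1 -> 2 rewrite mirrors the mask preprocessing noted in the diff):

import java.util.Arrays;

public class DefensiveCopyDemo {
  public static void main(String[] args) {
    byte[] callerOwned = { 0, 1, 0, 1 };
    byte[] internal = Arrays.copyOf(callerOwned, callerOwned.length);  // copy first
    for (int i = 0; i < internal.length; i++) {
      internal[i] = (byte) (internal[i] == 0 ? -1 : 2);                // then mutate the copy
    }
    System.out.println("caller:   " + Arrays.toString(callerOwned));   // unchanged
    System.out.println("internal: " + Arrays.toString(internal));      // preprocessed mask
  }
}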

[19/50] [abbrv] hbase git commit: HBASE-18533 Expose BucketCache values to be configured

2017-08-20 Thread busbey
HBASE-18533 Expose BucketCache values to be configured

Before this commit, BucketCache always used the default values.
This commit adds the ability to configure these values.

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0e32869f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0e32869f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0e32869f

Branch: refs/heads/HBASE-18467
Commit: 0e32869f01697abf29292aa786d0cdcca10213c6
Parents: 0b26ccd
Author: Zach York 
Authored: Wed Aug 2 14:43:03 2017 -0700
Committer: tedyu 
Committed: Mon Aug 14 13:27:26 2017 -0700

--
 .../hadoop/hbase/io/hfile/CacheConfig.java  |   2 +-
 .../hbase/io/hfile/bucket/BucketCache.java  | 126 ++-
 .../hbase/io/hfile/bucket/TestBucketCache.java  | 114 -
 .../io/hfile/bucket/TestBucketWriterThread.java |   3 +-
 4 files changed, 214 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0e32869f/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 140009b..13f048e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -671,7 +671,7 @@ public class CacheConfig {
   // Bucket cache logs its stats on creation internal to the constructor.
   bucketCache = new BucketCache(bucketCacheIOEngineName,
 bucketCacheSize, blockSize, bucketSizes, writerThreads, 
writerQueueLen, persistentPath,
-ioErrorsTolerationDuration);
+ioErrorsTolerationDuration, c);
 } catch (IOException ioex) {
   LOG.error("Can't instantiate bucket cache", ioex); throw new 
RuntimeException(ioex);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0e32869f/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 1084399..79b1f4d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -52,8 +52,11 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -100,14 +103,23 @@ import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
 public class BucketCache implements BlockCache, HeapSize {
   private static final Log LOG = LogFactory.getLog(BucketCache.class);
 
+  /** Priority buckets config */
+  static final String SINGLE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.single.factor";
+  static final String MULTI_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.multi.factor";
+  static final String MEMORY_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.memory.factor";
+  static final String EXTRA_FREE_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.extrafreefactor";
+  static final String ACCEPT_FACTOR_CONFIG_NAME = 
"hbase.bucketcache.acceptfactor";
+  static final String MIN_FACTOR_CONFIG_NAME = "hbase.bucketcache.minfactor";
+
   /** Priority buckets */
-  private static final float DEFAULT_SINGLE_FACTOR = 0.25f;
-  private static final float DEFAULT_MULTI_FACTOR = 0.50f;
-  private static final float DEFAULT_MEMORY_FACTOR = 0.25f;
-  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
+  @VisibleForTesting
+  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
+  static final float DEFAULT_MULTI_FACTOR = 0.50f;
+  static final float DEFAULT_MEMORY_FACTOR = 0.25f;
+  static final float DEFAULT_MIN_FACTOR = 0.85f;
 
+  private static final float DEFAULT_EXTRA_FREE_FACTOR = 0.10f;
   private static final float DEFAULT_ACCEPT_FACTOR = 0.95f;
-  private static final float DEFAULT_MIN_FACTOR = 0.85f;
 
   // Number of blocks to clear for each of the bucket size that is full
   private static final int DEFAULT_FREE_ENTIRE_BLOC
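
With these keys exposed, the single/multi/memory priority split and the free-space factors can be tuned through normal HBase configuration instead of being fixed at the defaults. A hedged example of setting a few of them programmatically (the same keys can also go in hbase-site.xml; the values are illustrative only):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class BucketCacheFactorsExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Keys introduced by this patch; the three priority factors should still sum to 1.0.
    conf.setFloat("hbase.bucketcache.single.factor", 0.20f);
    conf.setFloat("hbase.bucketcache.multi.factor", 0.55f);
    conf.setFloat("hbase.bucketcache.memory.factor", 0.25f);
    conf.setFloat("hbase.bucketcache.minfactor", 0.80f);
    System.out.println("single.factor = "
        + conf.getFloat("hbase.bucketcache.single.factor", 0.25f));
  }
}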

[22/50] [abbrv] hbase git commit: HBASE-17064 Add TaskMonitor#getTasks() variant which accepts type selection

2017-08-20 Thread busbey
HBASE-17064 Add TaskMonitor#getTasks() variant which accepts type selection

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/effd1093
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/effd1093
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/effd1093

Branch: refs/heads/HBASE-18467
Commit: effd1093b559aeba2bf66a4cf81cd4a0013de184
Parents: d37266f
Author: Reid Chan 
Authored: Tue Aug 15 15:50:22 2017 +0800
Committer: tedyu 
Committed: Tue Aug 15 09:45:19 2017 -0700

--
 .../hbase/tmpl/common/TaskMonitorTmpl.jamon | 21 +
 .../hadoop/hbase/monitoring/TaskMonitor.java| 97 +---
 .../hbase/monitoring/TestTaskMonitor.java   | 48 ++
 3 files changed, 133 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/effd1093/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon
index b4a5fea..986bc3a 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon
@@ -27,27 +27,8 @@ String filter = "general";
 String format = "html";
 
 <%java>
-List tasks = taskMonitor.getTasks();
-Iterator iter = tasks.iterator();
 // apply requested filter
-while (iter.hasNext()) {
-  MonitoredTask t = iter.next();
-  if (filter.equals("general")) {
-if (t instanceof MonitoredRPCHandler)
-  iter.remove();
-  } else if (filter.equals("handler")) {
-if (!(t instanceof MonitoredRPCHandler))
-  iter.remove();
-  } else if (filter.equals("rpc")) {
-if (!(t instanceof MonitoredRPCHandler) || 
-!((MonitoredRPCHandler) t).isRPCRunning())
-  iter.remove();
-  } else if (filter.equals("operation")) {
-if (!(t instanceof MonitoredRPCHandler) || 
-!((MonitoredRPCHandler) t).isOperationRunning())
-  iter.remove();
-  }
-}
+List tasks = taskMonitor.getTasks(filter);
 long now = System.currentTimeMillis();
 Collections.reverse(tasks);
 boolean first = true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/effd1093/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index 780916f..ad9bd02 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -157,22 +157,52 @@ public class TaskMonitor {
* MonitoredTasks handled by this TaskMonitor.
* @return A complete list of MonitoredTasks.
*/
-  public synchronized List getTasks() {
+  public List getTasks() {
+return getTasks(null);
+  }
+
+  /**
+   * Produces a list containing copies of the current state of all non-expired 
+   * MonitoredTasks handled by this TaskMonitor.
+   * @param filter type of wanted tasks
+   * @return A filtered list of MonitoredTasks.
+   */
+  public synchronized List getTasks(String filter) {
 purgeExpiredTasks();
-ArrayList ret = Lists.newArrayListWithCapacity(tasks.size() 
+ rpcTasks.size());
-for (Iterator it = tasks.iterator();
- it.hasNext();) {
-  TaskAndWeakRefPair pair = it.next();
-  MonitoredTask t = pair.get();
-  ret.add(t.clone());
+TaskFilter taskFilter = createTaskFilter(filter);
+ArrayList results =
+Lists.newArrayListWithCapacity(tasks.size() + rpcTasks.size());
+processTasks(tasks, taskFilter, results);
+processTasks(rpcTasks, taskFilter, results);
+return results;
+  }
+
+  /**
+   * Create a task filter according to a given filter type.
+   * @param filter type of monitored task
+   * @return a task filter
+   */
+  private static TaskFilter createTaskFilter(String filter) {
+switch (TaskFilter.TaskType.getTaskType(filter)) {
+  case GENERAL: return task -> task instanceof MonitoredRPCHandler;
+  case HANDLER: return task -> !(task instanceof MonitoredRPCHandler);
+  case RPC: return task -> !(task instanceof MonitoredRPCHandler) ||
+   !((MonitoredRPCHandler) task).isRPCRunning();
+  case OPERATION: return task -> !(task instanceof MonitoredRPCHandler) ||
+ !((MonitoredRPCHandler) 
task).isOperationRunning();
+  default: return task -> f
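
Callers that previously fetched every task and filtered with instanceof checks can now ask for a pre-filtered list. A hedged usage sketch: getTasks(String) and the filter names ("general", "handler", "rpc", "operation") come from this patch, while the TaskMonitor.get() accessor and the MonitoredTask getters are assumed from the surrounding API:

import java.util.List;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;

public class TaskMonitorFilterExample {
  public static void main(String[] args) {
    TaskMonitor monitor = TaskMonitor.get();            // assumed singleton accessor
    // New overload: the monitor applies the filter itself.
    List<MonitoredTask> rpcTasks = monitor.getTasks("rpc");
    for (MonitoredTask task : rpcTasks) {
      System.out.println(task.getDescription() + " : " + task.getState());
    }
    // The no-argument form keeps its old behavior and returns everything.
    System.out.println(monitor.getTasks().size() + " tasks in total");
  }
}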

[40/50] [abbrv] hbase git commit: HBASE-18375: Fix the bug where the pool chunks from ChunkCreator are deallocated and not returned to pool, because there is no reference to them

2017-08-20 Thread busbey
HBASE-18375: Fix the bug where the pool chunks from ChunkCreator are 
deallocated and not returned to pool, because there is no reference to them


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/75a6b368
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/75a6b368
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/75a6b368

Branch: refs/heads/HBASE-18467
Commit: 75a6b36849c58d6a751f57226ab0c8f7884a9e87
Parents: 092dc6d
Author: anastas 
Authored: Thu Aug 17 18:23:19 2017 +0300
Committer: anastas 
Committed: Thu Aug 17 18:23:19 2017 +0300

--
 .../regionserver/CellChunkImmutableSegment.java |   5 +-
 .../hadoop/hbase/regionserver/ChunkCreator.java | 171 +--
 .../hbase/regionserver/CompactionPipeline.java  |  19 ++-
 .../hbase/regionserver/MemStoreLABImpl.java |  27 ++-
 .../hbase/regionserver/TestMemStoreLAB.java |  12 +-
 .../TestMemstoreLABWithoutPool.java |   3 +-
 6 files changed, 126 insertions(+), 111 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/75a6b368/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
index cdda279..3653166 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
@@ -176,10 +176,7 @@ public class CellChunkImmutableSegment extends 
ImmutableSegment {
   private int createCellReference(ByteBufferKeyValue cell, ByteBuffer 
idxBuffer, int idxOffset) {
 int offset = idxOffset;
 int dataChunkID = cell.getChunkId();
-// ensure strong pointer to data chunk, as index is no longer directly 
points to it
-Chunk c = ChunkCreator.getInstance().saveChunkFromGC(dataChunkID);
-// if c is null, it means that this cell chunks was already released 
shouldn't happen
-assert (c!=null);
+
 offset = ByteBufferUtils.putInt(idxBuffer, offset, dataChunkID);// 
write data chunk id
 offset = ByteBufferUtils.putInt(idxBuffer, offset, cell.getOffset());  
// offset
 offset = ByteBufferUtils.putInt(idxBuffer, offset, 
KeyValueUtil.length(cell)); // length

http://git-wip-us.apache.org/repos/asf/hbase/blob/75a6b368/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
index 7e5395c..e818426 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import java.lang.ref.WeakReference;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
@@ -58,21 +57,8 @@ public class ChunkCreator {
   // the header size need to be changed in case chunk id size is changed
   public static final int SIZEOF_CHUNK_HEADER = Bytes.SIZEOF_INT;
 
-  // An object pointed by a weak reference can be garbage collected, in 
opposite to an object
-  // referenced by a strong (regular) reference. Every chunk created via 
ChunkCreator is referenced
-  // from either weakChunkIdMap or strongChunkIdMap.
-  // Upon chunk C creation, C's ID is mapped into weak reference to C, in 
order not to disturb C's
-  // GC in case all other reference to C are going to be removed.
-  // When chunk C is referenced from CellChunkMap (via C's ID) it is possible 
to GC the chunk C.
-  // To avoid that upon inserting C into CellChunkMap, C's ID is mapped into 
strong (regular)
-  // reference to C.
-
-  // map that doesn't influence GC
-  private Map> weakChunkIdMap =
-  new ConcurrentHashMap>();
-
-  // map that keeps chunks from garbage collection
-  private Map strongChunkIdMap = new 
ConcurrentHashMap();
+  // mapping from chunk IDs to chunks
+  private Map chunkIdMap = new ConcurrentHashMap();
 
   private final int chunkSize;
   private final boolean offheap;
@@ -95,7 +81,7 @@ public class ChunkCreator {
   }
 
   /**
-   * Initializes the instance of MSLABChunkCreator
+   * Initializes the instance of ChunkCreator
* @param chunkSize the chunkSize
* @param offheap indicates if the chunk is to be created offheap or not
* @param globalMemStoreSize  the global memstore s
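
The removed comments describe the old scheme: chunks referenced only through the weak-reference map could be garbage collected, which is exactly how pool chunks went missing instead of returning to the pool; the fix keeps a single strong chunkIdMap. A small, hedged illustration of the difference between the two kinds of reference (GC behavior is not deterministic, so the weak reference "may" be cleared):

import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;

public class WeakVsStrongDemo {
  public static void main(String[] args) throws InterruptedException {
    Map<Integer, byte[]> chunkIdMap = new HashMap<>();           // strong, like the fix
    byte[] chunk = new byte[4 * 1024 * 1024];
    WeakReference<byte[]> weakRef = new WeakReference<>(chunk);  // weak, like the old map
    chunkIdMap.put(1, chunk);
    chunk = null;                       // drop the local strong reference

    System.gc();
    Thread.sleep(100);
    // The strong map entry keeps the chunk reachable regardless of the weak reference.
    System.out.println("reachable via chunkIdMap: " + (chunkIdMap.get(1) != null));

    chunkIdMap.clear();                 // now only the weak reference remains
    System.gc();
    Thread.sleep(100);
    System.out.println("weak reference after GC (may be null): " + weakRef.get());
  }
}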

[08/50] [abbrv] hbase git commit: HBASE-18271 Shade netty Purge mention of netty-all.

2017-08-20 Thread busbey
HBASE-18271 Shade netty Purge mention of netty-all.

Add in hbase-thirdparty hbase-shaded-netty instead.
s/io.netty/org.apache.hadoop.hbase.shaded.io.netty/ everywhere in hbase.

Also set a system property when running tests and when starting
hbase; it is required by netty so it can find the relocation files in
the bundled .so.
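Netty locates its bundled native transport library by the package name of its classes, so once everything is relocated under the shaded package the launcher and the test runner have to hand that prefix to netty before any native code is loaded. A minimal sketch of the idea, assuming the property follows netty's io.netty.packagePrefix convention applied to the shaded package name (the exact flag added to bin/hbase is in the truncated hunk below, so the property name and value here are assumptions):

// Sketch only: the property name assumes netty's "io.netty.packagePrefix"
// lookup rewritten for HBase's shaded package; it must be set before the
// first shaded netty class that touches native code is loaded.
public final class ShadedNettySetup {
  public static void main(String[] args) {
    System.setProperty(
        "org.apache.hadoop.hbase.shaded.io.netty.packagePrefix",
        "org.apache.hadoop.hbase.shaded.");
    // ... start whatever uses the shaded netty epoll transport ...
  }
}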


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c6ac04ab
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c6ac04ab
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c6ac04ab

Branch: refs/heads/HBASE-18467
Commit: c6ac04ab39dafad8ebc7033b852f535a932d4ce6
Parents: 923195c
Author: Michael Stack 
Authored: Wed Aug 2 14:47:51 2017 -0700
Committer: Michael Stack 
Committed: Sun Aug 13 12:46:21 2017 -0700

--
 bin/hbase   | 12 -
 hbase-client/pom.xml|  4 +-
 .../client/AsyncAdminRequestRetryingCaller.java |  2 +-
 .../client/AsyncBatchRpcRetryingCaller.java |  2 +-
 .../hbase/client/AsyncConnectionImpl.java   |  2 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|  4 +-
 .../AsyncMasterRequestRpcRetryingCaller.java|  2 +-
 .../hadoop/hbase/client/AsyncRegionLocator.java |  4 +-
 .../hbase/client/AsyncRpcRetryingCaller.java|  2 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |  2 +-
 .../AsyncScanSingleRegionRpcRetryingCaller.java |  4 +-
 .../AsyncServerRequestRpcRetryingCaller.java|  2 +-
 .../AsyncSingleRequestRpcRetryingCaller.java|  2 +-
 .../hbase/client/ClusterStatusListener.java | 20 -
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  4 +-
 .../hadoop/hbase/ipc/AbstractRpcClient.java |  2 +-
 .../hbase/ipc/BufferCallBeforeInitHandler.java  |  6 +--
 .../java/org/apache/hadoop/hbase/ipc/Call.java  |  2 +-
 .../hadoop/hbase/ipc/CellBlockBuilder.java  |  6 +--
 .../hbase/ipc/DefaultNettyEventLoopConfig.java  | 10 ++---
 .../apache/hadoop/hbase/ipc/NettyRpcClient.java | 10 ++---
 .../hbase/ipc/NettyRpcClientConfigHelper.java   |  4 +-
 .../hadoop/hbase/ipc/NettyRpcConnection.java| 34 +++
 .../hadoop/hbase/ipc/NettyRpcDuplexHandler.java | 16 +++
 .../apache/hadoop/hbase/ipc/RpcConnection.java  |  6 +--
 .../hbase/security/CryptoAESUnwrapHandler.java  | 10 ++---
 .../hbase/security/CryptoAESWrapHandler.java| 16 +++
 .../NettyHBaseRpcConnectionHeaderHandler.java   | 12 ++---
 .../hbase/security/NettyHBaseSaslRpcClient.java |  4 +-
 .../NettyHBaseSaslRpcClientHandler.java |  8 ++--
 .../hbase/security/SaslChallengeDecoder.java|  6 +--
 .../hbase/security/SaslUnwrapHandler.java   | 10 ++---
 .../hadoop/hbase/security/SaslWrapHandler.java  | 16 +++
 .../hbase/client/example/HttpProxyExample.java  | 44 +--
 hbase-prefix-tree/pom.xml   |  8 ++--
 hbase-rsgroup/pom.xml   |  4 +-
 hbase-server/pom.xml|  7 +--
 .../hbase/io/asyncfs/AsyncFSOutputHelper.java   |  4 +-
 .../asyncfs/FanOutOneBlockAsyncDFSOutput.java   | 32 +++---
 .../FanOutOneBlockAsyncDFSOutputHelper.java | 46 ++--
 .../FanOutOneBlockAsyncDFSOutputSaslHelper.java | 38 
 .../apache/hadoop/hbase/ipc/NettyRpcServer.java | 30 ++---
 .../ipc/NettyRpcServerPreambleHandler.java  |  8 ++--
 .../hbase/ipc/NettyRpcServerRequestDecoder.java |  8 ++--
 .../ipc/NettyRpcServerResponseEncoder.java  |  8 ++--
 .../hbase/ipc/NettyServerRpcConnection.java |  4 +-
 .../hbase/mapreduce/TableMapReduceUtil.java |  2 +-
 .../hbase/master/ClusterStatusPublisher.java| 30 ++---
 .../hbase/regionserver/wal/AsyncFSWAL.java  |  6 +--
 .../wal/AsyncProtobufLogWriter.java |  4 +-
 .../wal/SecureAsyncProtobufLogWriter.java   |  4 +-
 .../hbase/util/NettyEventLoopGroupConfig.java   | 20 -
 .../hadoop/hbase/wal/AsyncFSWALProvider.java| 12 ++---
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  |  4 +-
 .../TestFanOutOneBlockAsyncDFSOutput.java   | 10 ++---
 .../hbase/io/asyncfs/TestLocalAsyncOutput.java  |  8 ++--
 .../TestSaslFanOutOneBlockAsyncDFSOutput.java   | 10 ++---
 .../apache/hadoop/hbase/ipc/TestNettyIPC.java   |  8 ++--
 .../hbase/regionserver/wal/TestAsyncFSWAL.java  |  8 ++--
 .../regionserver/wal/TestAsyncProtobufLog.java  |  8 ++--
 .../regionserver/wal/TestAsyncWALReplay.java|  8 ++--
 pom.xml | 19 +---
 62 files changed, 330 insertions(+), 318 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c6ac04ab/bin/hbase
--
diff --git a/bin/hbase b/bin/hbase
index d7a8069..7eeaf2d 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -473,8 +473,16 @@ fi
 HEAP_SETT

hbase git commit: HBASE-18467 attempt at using jiraComment. [Forced Update!]

2017-08-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18467 1abee706c -> b1813c01d (forced update)


HBASE-18467 attempt at using jiraComment.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b1813c01
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b1813c01
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b1813c01

Branch: refs/heads/HBASE-18467
Commit: b1813c01dde40c29f8b6cafd597c4494c8ca3ff6
Parents: c2537ac
Author: Sean Busbey 
Authored: Sun Aug 20 16:29:03 2017 -0500
Committer: Sean Busbey 
Committed: Sun Aug 20 17:04:03 2017 -0500

--
 dev-support/Jenkinsfile | 26 +++---
 1 file changed, 19 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b1813c01/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 7207ab3..f1ce069 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -170,6 +170,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   steps {
 unstash 'yetus'
 sh '''#!/usr/bin/env bash
+  mkdir -p "${OUTPUTDIR}"
   # for branch-1.1 we don't do jdk8 findbugs, so do it here
   if [ "${BRANCH_NAME}" == "branch-1.1" ]; then
 TESTS+=",findbugs"
@@ -234,6 +235,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   steps {
 unstash 'yetus'
 sh '''#!/usr/bin/env bash
+  mkdir -p "${OUTPUTDIR}"
   declare commentfile
   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
 commentfile='${OUTPUTDIR}/success'
@@ -377,23 +379,33 @@ END
 comment += 'Committer, please check your recent inclusion of a patch for this issue.\\'
  }
 comment += "  [build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: ${currentBuild.result}\\\\details (if available):"
- echo "Comment so far: "
+ echo "[DEBUG] Comment so far: "
  echo comment
  echo ""
- echo "trying to aggregate step-wise results"
+ echo "[DEBUG] trying to aggregate step-wise results"
 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "\n" }.join
+ echo "[INFO] Comment:"
  echo comment
- echo "${currentBuild.changeSets.size} changes"
+ echo ""
+ echo "[INFO] ${currentBuild.changeSets.size} changes"
+ def seenJiras = []
  for ( change in currentBuild.changeSets ) {
echo "change:"
echo "  ${change.getAuthor}"
echo "  ${change.getMsg}"
echo "  ${change.getMsgAnnotated}"
echo ""
-   // figure out jira  from the message
-   // dedup jiras we already commented on
-   // comment, something like
-   // jiraComment issueKey: currentIssue, body: comment
+   change.getMsg.findAll( /HBASE-[0-9]+/ ).each { currentIssue ->
+ echo "[DEBUG] found jira key: ${currentIssue}"
+ if ( currentIssue in seenJiras ) {
+   echo "[DEBUG] already commented on ${currentIssue}."
+ } else {
+   echo "[INFO] commenting on ${currentIssue}."
+   jiraComment issueKey: currentIssue, body: comment
+   seenJiras << currentIssue
+ }
+   }
+   //TODO warn if no JIRA key found in message, email committer
  }
   }
 }

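The interesting part of the pipeline change above is the last hunk: scan each change message for HBASE-NNNN keys, keep a seen list so an issue is commented on at most once per build, and hand the assembled comment to the jiraComment step. A rough, self-contained Java equivalent of that scan (JiraClient.comment is a hypothetical stand-in for the Jenkins jiraComment step, not a real API):

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class JiraCommenterSketch {
  private static final Pattern ISSUE_KEY = Pattern.compile("HBASE-[0-9]+");

  /** Posts the build comment once per JIRA key mentioned in the change messages. */
  static void commentOnChanges(List<String> changeMessages, String comment) {
    Set<String> seenJiras = new LinkedHashSet<>();   // dedup, keep first-seen order
    for (String msg : changeMessages) {
      Matcher m = ISSUE_KEY.matcher(msg);
      while (m.find()) {
        String issue = m.group();
        if (seenJiras.add(issue)) {
          JiraClient.comment(issue, comment);        // hypothetical stand-in for jiraComment
        }
      }
    }
  }
}

// Hypothetical client; the real pipeline uses the Jenkins JIRA plugin's jiraComment step.
class JiraClient {
  static void comment(String issueKey, String body) {
    System.out.println("commenting on " + issueKey);
  }
}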


hbase git commit: HBASE-18467 correct naming the output file for comments. correctly join the list of results.

2017-08-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18467 b1813c01d -> 949b0aff6


HBASE-18467 correct naming the output file for comments. correctly join the list of results.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/949b0aff
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/949b0aff
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/949b0aff

Branch: refs/heads/HBASE-18467
Commit: 949b0aff6d8231259fc9e95a643deeea93127884
Parents: b1813c0
Author: Sean Busbey 
Authored: Sun Aug 20 18:51:28 2017 -0500
Committer: Sean Busbey 
Committed: Sun Aug 20 18:51:28 2017 -0500

--
 dev-support/Jenkinsfile | 12 +---
 1 file changed, 5 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/949b0aff/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index f1ce069..e4ddbba 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -170,17 +170,16 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   steps {
 unstash 'yetus'
 sh '''#!/usr/bin/env bash
-  mkdir -p "${OUTPUTDIR}"
   # for branch-1.1 we don't do jdk8 findbugs, so do it here
   if [ "${BRANCH_NAME}" == "branch-1.1" ]; then
 TESTS+=",findbugs"
   fi
   declare commentfile
   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-commentfile='${OUTPUTDIR}/success'
+commentfile="${OUTPUTDIR}/success"
 echo '(/) *{color:green}+1 jdk7 checks{color}*' >> "${commentfile}"
   else
-commentfile='${OUTPUTDIR}/failure'
+commentfile="${OUTPUTDIR}/failure"
 echo '(x) *{color:red}-1 jdk7 checks{color}*' >> "${commentfile}"
   fi
   echo '-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]' >> "${commentfile}"
@@ -235,13 +234,12 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
   steps {
 unstash 'yetus'
 sh '''#!/usr/bin/env bash
-  mkdir -p "${OUTPUTDIR}"
   declare commentfile
   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-commentfile='${OUTPUTDIR}/success'
+commentfile="${OUTPUTDIR}/success"
 echo '(/) *{color:green}+1 jdk8 checks{color}*' >> "${commentfile}"
   else
-commentfile='${OUTPUTDIR}/failure'
+commentfile="${OUTPUTDIR}/failure"
 echo '(x) *{color:red}-1 jdk8 checks{color}*' >> "${commentfile}"
   fi
   echo '-- For more information [see jdk8 report|${BUILD_URL}/JDK8_Nightly_Build_Report/]' >> "${commentfile}"
@@ -383,7 +381,7 @@ END
  echo comment
  echo ""
  echo "[DEBUG] trying to aggregate step-wise results"
- comment += results.collect { fileExists(file: it) ? readFile(file: it) : "\n" }.join
+ comment += results.collect { fileExists(file: it) ? readFile(file: it) : "\n" }.join("\n")
  echo "[INFO] Comment:"
  echo comment
  echo ""



hbase git commit: HBASE-18467 fixing substitutions. debug output of changeSets

2017-08-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18467 949b0aff6 -> 8e76d38b1


HBASE-18467 fixing substitutions. debug output of changeSets


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8e76d38b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8e76d38b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8e76d38b

Branch: refs/heads/HBASE-18467
Commit: 8e76d38b14cd26a66546c23688d077542c7a7234
Parents: 949b0af
Author: Sean Busbey 
Authored: Sun Aug 20 22:17:58 2017 -0500
Committer: Sean Busbey 
Committed: Sun Aug 20 22:17:58 2017 -0500

--
 dev-support/Jenkinsfile | 13 +++--
 1 file changed, 7 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8e76d38b/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index e4ddbba..b9e283e 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -137,7 +137,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 commentfile="${OUTPUTDIR}/failure"
 echo '(x) *{color:red}-1 general checks{color}*' >> "${commentfile}"
   fi
-  echo '-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]' >> "${commentfile}"
+  echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${commentfile}"
 '''
   }
   post {
@@ -182,7 +182,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 commentfile="${OUTPUTDIR}/failure"
 echo '(x) *{color:red}-1 jdk7 checks{color}*' >> "${commentfile}"
   fi
-  echo '-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]' >> "${commentfile}"
+  echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${commentfile}"
 '''
   }
   post {
@@ -242,7 +242,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 commentfile="${OUTPUTDIR}/failure"
 echo '(x) *{color:red}-1 jdk8 checks{color}*' >> "${commentfile}"
   fi
-  echo '-- For more information [see jdk8 report|${BUILD_URL}/JDK8_Nightly_Build_Report/]' >> "${commentfile}"
+  echo "-- For more information [see jdk8 report|${BUILD_URL}/JDK8_Nightly_Build_Report/]" >> "${commentfile}"
 '''
   }
   post {
@@ -376,15 +376,16 @@ END
 // Ideally get the committer our of the change and @ mention them in the per-jira comment
 comment += 'Committer, please check your recent inclusion of a patch for this issue.\\'
  }
- comment += "  [build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: ${currentBuild.result}\\\\details (if available):"
- echo "[DEBUG] Comment so far: "
- echo comment
+ comment += "  [build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: ${currentBuild.result}\\\\details (if available):\n"
+ //echo "[DEBUG] Comment so far: "
+ //echo comment
  echo ""
  echo "[DEBUG] trying to aggregate step-wise results"
  comment += results.collect { fileExists(file: it) ? readFile(file: it) : "\n" }.join("\n")
  echo "[INFO] Comment:"
  echo comment
  echo ""
+ echo "[DEBUG] ${currentBuild.changeSets}"
  echo "[INFO] ${currentBuild.changeSets.size} changes"
  def seenJiras = []
  for ( change in currentBuild.changeSets ) {



hbase git commit: HBASE-18467 delete result files from prior runs.

2017-08-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18467 8e76d38b1 -> 61c292496


HBASE-18467 delete result files from prior runs.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/61c29249
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/61c29249
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/61c29249

Branch: refs/heads/HBASE-18467
Commit: 61c292496bf6383364a78f49c8b78942121f08f3
Parents: 8e76d38
Author: Sean Busbey 
Authored: Sun Aug 20 22:29:46 2017 -0500
Committer: Sean Busbey 
Committed: Sun Aug 20 22:29:46 2017 -0500

--
 dev-support/Jenkinsfile | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/61c29249/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index b9e283e..b63a77f 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -129,6 +129,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 unstash 'yetus'
 // TODO should this be a download from master, similar to how the personality is?
 sh '''#!/usr/bin/env bash
+  rm -f "${OUTPUTDIR}/success" "${OUTPUTDIR}/failure"
   declare commentfile
   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
 commentfile="${OUTPUTDIR}/success"
@@ -175,6 +176,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 TESTS+=",findbugs"
   fi
   declare commentfile
+  rm -f "${OUTPUTDIR}/success" "${OUTPUTDIR}/failure"
   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
 commentfile="${OUTPUTDIR}/success"
 echo '(/) *{color:green}+1 jdk7 checks{color}*' >> "${commentfile}"
@@ -235,6 +237,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 unstash 'yetus'
 sh '''#!/usr/bin/env bash
   declare commentfile
+  rm -f "${OUTPUTDIR}/success" "${OUTPUTDIR}/failure"
   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
 commentfile="${OUTPUTDIR}/success"
 echo '(/) *{color:green}+1 jdk8 checks{color}*' >> "${commentfile}"
@@ -315,6 +318,7 @@ curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
 }
 // expectation check largely based on HBASE-14952
 sh '''#!/bin/bash -e
+  rm -rf "${env.WORKSPACE}/src_tarball_success" "${env.WORKSPACE}/src_tarball_failure"
   echo "Checking against things we don't expect to include in the source tarball (git related, hbase-native-client, etc.)"
   cat >known_excluded <