hadoop git commit: HADOOP-13161. remove JDK7 from Dockerfile (aw)

2016-05-16 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk 61f46be07 -> 2c91fd824


HADOOP-13161. remove JDK7 from Dockerfile (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2c91fd82
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2c91fd82
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2c91fd82

Branch: refs/heads/trunk
Commit: 2c91fd82415a7b0fcdf91952bda98bea669f1646
Parents: 61f46be
Author: Allen Wittenauer 
Authored: Mon May 16 20:25:40 2016 -0700
Committer: Allen Wittenauer 
Committed: Mon May 16 20:25:40 2016 -0700

--
 dev-support/docker/Dockerfile | 26 ++
 1 file changed, 14 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c91fd82/dev-support/docker/Dockerfile
--
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index eba7df7..f9bf5aa 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -28,9 +28,11 @@ ENV DEBCONF_TERSE true
 
 ##
 # Install common dependencies from packages
+#
+# WARNING: DO NOT PUT JAVA APPS HERE! Otherwise they will install default
+# Ubuntu Java.  See Java section below!
 ##
 RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
-ant \
 build-essential \
 bzip2 \
 cmake \
@@ -42,17 +44,14 @@ RUN apt-get -q update && apt-get -q install 
--no-install-recommends -y \
 git \
 gnupg-agent \
 make \
-maven \
 libbz2-dev \
 libcurl4-openssl-dev \
 libfuse-dev \
-libjansson-dev \
 libprotobuf-dev \
 libprotoc-dev \
 libsnappy-dev \
 libssl-dev \
 libtool \
-openjdk-7-jdk \
 pinentry-curses \
 pkg-config \
 protobuf-compiler \
@@ -63,10 +62,6 @@ RUN apt-get -q update && apt-get -q install 
--no-install-recommends -y \
 snappy \
 zlib1g-dev
 
-# Fixing the Apache commons / Maven dependency problem under Ubuntu:
-# See http://wiki.apache.org/commons/VfsProblems
-RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .
-
 ##
 # Install ISA-L library
 ##
@@ -87,13 +82,20 @@ RUN add-apt-repository -y ppa:webupd8team/java
 RUN apt-get -q update
 
 # Auto-accept the Oracle JDK license
-RUN echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select 
true | sudo /usr/bin/debconf-set-selections
-RUN apt-get -q install --no-install-recommends -y oracle-java7-installer
-
-# Auto-accept the Oracle JDK license
 RUN echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select 
true | sudo /usr/bin/debconf-set-selections
 RUN apt-get -q install --no-install-recommends -y oracle-java8-installer
 
+
+# Apps that require Java
+###
+RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
+ant \
+maven
+
+# Fixing the Apache commons / Maven dependency problem under Ubuntu:
+# See http://wiki.apache.org/commons/VfsProblems
+RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .
+
 ##
 # Install findbugs
 ##


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[2/2] hadoop git commit: HDFS-10208. Addendum for HDFS-9579: to handle the case when client machine can't resolve network path (Ming Ma via sjlee)

2016-05-16 Thread sjlee
HDFS-10208. Addendum for HDFS-9579: to handle the case when client machine 
can't resolve network path (Ming Ma via sjlee)

(cherry picked from commit 61f46be071e42f9eb49a54b1bd2e54feac59f808)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9330a7b4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9330a7b4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9330a7b4

Branch: refs/heads/branch-2
Commit: 9330a7b4de7b023f2242554e72c0d7c0d98cf41d
Parents: 09a613b
Author: Sangjin Lee 
Authored: Mon May 16 18:49:47 2016 -0700
Committer: Sangjin Lee 
Committed: Mon May 16 18:59:19 2016 -0700

--
 .../fs/CommonConfigurationKeysPublic.java   |  7 ++-
 .../org/apache/hadoop/net/NetworkTopology.java  | 35 
 .../java/org/apache/hadoop/net/NodeBase.java|  9 +++
 .../src/main/resources/core-default.xml | 13 +
 .../org/apache/hadoop/hdfs/ClientContext.java   | 47 ++--
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  | 31 +++---
 .../hadoop/hdfs/TestDistributedFileSystem.java  | 59 +---
 .../apache/hadoop/net/TestNetworkTopology.java  |  9 +++
 8 files changed, 174 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9330a7b4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index ca17f8d..e16c0ba 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -116,7 +116,12 @@ public class CommonConfigurationKeysPublic {
   public static final String  FS_TRASH_INTERVAL_KEY = "fs.trash.interval";
   /** Default value for FS_TRASH_INTERVAL_KEY */
   public static final longFS_TRASH_INTERVAL_DEFAULT = 0;
-
+  /** See core-default.xml. */
+  public static final String  FS_CLIENT_TOPOLOGY_RESOLUTION_ENABLED =
+  "fs.client.resolve.topology.enabled";
+  /** Default value for FS_CLIENT_TOPOLOGY_RESOLUTION_ENABLED. */
+  public static final boolean FS_CLIENT_TOPOLOGY_RESOLUTION_ENABLED_DEFAULT =
+  false;
   /** See core-default.xml */
   public static final String  IO_MAPFILE_BLOOM_SIZE_KEY =
 "io.mapfile.bloom.size";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9330a7b4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index 1e23ff6..cf5b176 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -655,6 +655,41 @@ public class NetworkTopology {
 return dis+2;
   }
 
+  /** Return the distance between two nodes by comparing their network paths
+   * without checking if they belong to the same ancestor node by reference.
+   * It is assumed that the distance from one node to its parent is 1
+   * The distance between two nodes is calculated by summing up their distances
+   * to their closest common ancestor.
+   * @param node1 one node
+   * @param node2 another node
+   * @return the distance between node1 and node2
+   */
+  static public int getDistanceByPath(Node node1, Node node2) {
+if (node1 == null && node2 == null) {
+  return 0;
+}
+if (node1 == null || node2 == null) {
+  LOG.warn("One of the nodes is a null pointer");
+  return Integer.MAX_VALUE;
+}
+String[] paths1 = NodeBase.getPathComponents(node1);
+String[] paths2 = NodeBase.getPathComponents(node2);
+int dis = 0;
+int index = 0;
+int minLevel = Math.min(paths1.length, paths2.length);
+while (index < minLevel) {
+  if (!paths1[index].equals(paths2[index])) {
+// Once the path starts to diverge,  compute the distance that include
+// the rest of paths.
+dis += 2 * (minLevel - index);
+break;
+  }
+  index++;
+}
+dis += Math.abs(paths1.length - paths2.length);
+return dis;
+  }
+
   /** Check if two nodes are on the same rack
* @param node1 one node (can be null)
* @param node2 another 

[1/2] hadoop git commit: HDFS-10208. Addendum for HDFS-9579: to handle the case when client machine can't resolve network path (Ming Ma via sjlee)

2016-05-16 Thread sjlee
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 09a613b02 -> 9330a7b4d
  refs/heads/trunk 730bc746f -> 61f46be07


HDFS-10208. Addendum for HDFS-9579: to handle the case when client machine 
can't resolve network path (Ming Ma via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/61f46be0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/61f46be0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/61f46be0

Branch: refs/heads/trunk
Commit: 61f46be071e42f9eb49a54b1bd2e54feac59f808
Parents: 730bc74
Author: Sangjin Lee 
Authored: Mon May 16 18:49:47 2016 -0700
Committer: Sangjin Lee 
Committed: Mon May 16 18:49:47 2016 -0700

--
 .../fs/CommonConfigurationKeysPublic.java   |  7 ++-
 .../org/apache/hadoop/net/NetworkTopology.java  | 35 
 .../java/org/apache/hadoop/net/NodeBase.java|  9 
 .../src/main/resources/core-default.xml | 13 +
 .../org/apache/hadoop/hdfs/ClientContext.java   | 47 +---
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  | 31 +++
 .../hadoop/hdfs/TestDistributedFileSystem.java  | 57 +---
 .../apache/hadoop/net/TestNetworkTopology.java  |  9 
 8 files changed, 173 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/61f46be0/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 648ad59..f6ccc56 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -111,7 +111,12 @@ public class CommonConfigurationKeysPublic {
   public static final String  FS_TRASH_INTERVAL_KEY = "fs.trash.interval";
   /** Default value for FS_TRASH_INTERVAL_KEY */
   public static final longFS_TRASH_INTERVAL_DEFAULT = 0;
-
+  /** See core-default.xml. */
+  public static final String  FS_CLIENT_TOPOLOGY_RESOLUTION_ENABLED =
+  "fs.client.resolve.topology.enabled";
+  /** Default value for FS_CLIENT_TOPOLOGY_RESOLUTION_ENABLED. */
+  public static final boolean FS_CLIENT_TOPOLOGY_RESOLUTION_ENABLED_DEFAULT =
+  false;
   /** See core-default.xml */
   public static final String  IO_MAPFILE_BLOOM_SIZE_KEY =
 "io.mapfile.bloom.size";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/61f46be0/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index 1e23ff6..cf5b176 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -655,6 +655,41 @@ public class NetworkTopology {
 return dis+2;
   }
 
+  /** Return the distance between two nodes by comparing their network paths
+   * without checking if they belong to the same ancestor node by reference.
+   * It is assumed that the distance from one node to its parent is 1
+   * The distance between two nodes is calculated by summing up their distances
+   * to their closest common ancestor.
+   * @param node1 one node
+   * @param node2 another node
+   * @return the distance between node1 and node2
+   */
+  static public int getDistanceByPath(Node node1, Node node2) {
+if (node1 == null && node2 == null) {
+  return 0;
+}
+if (node1 == null || node2 == null) {
+  LOG.warn("One of the nodes is a null pointer");
+  return Integer.MAX_VALUE;
+}
+String[] paths1 = NodeBase.getPathComponents(node1);
+String[] paths2 = NodeBase.getPathComponents(node2);
+int dis = 0;
+int index = 0;
+int minLevel = Math.min(paths1.length, paths2.length);
+while (index < minLevel) {
+  if (!paths1[index].equals(paths2[index])) {
+// Once the path starts to diverge,  compute the distance that include
+// the rest of paths.
+dis += 2 * (minLevel - index);
+break;
+  }
+  index++;
+}
+dis += Math.abs(paths1.length - paths2.length);
+return dis;
+  }
+
   /** Check if two nodes are on the same rack
* @param 

svn commit: r1744175 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/ publish/

2016-05-16 Thread lei
Author: lei
Date: Tue May 17 01:54:03 2016
New Revision: 1744175

URL: http://svn.apache.org/viewvc?rev=1744175&view=rev
Log:
Add lei (myself) to PMC list

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
hadoop/common/site/main/publish/bylaws.pdf
hadoop/common/site/main/publish/index.pdf
hadoop/common/site/main/publish/issue_tracking.pdf
hadoop/common/site/main/publish/linkmap.pdf
hadoop/common/site/main/publish/mailing_lists.pdf
hadoop/common/site/main/publish/privacy_policy.pdf
hadoop/common/site/main/publish/releases.pdf
hadoop/common/site/main/publish/version_control.pdf
hadoop/common/site/main/publish/who.html
hadoop/common/site/main/publish/who.pdf

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1744175&r1=1744174&r2=1744175&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Tue 
May 17 01:54:03 2016
@@ -287,6 +287,13 @@
   -6
 
 
+   
+ lei
+ http://people.apache.org/~lei;>Lei Xu
+ Cloudera
+ 
+ -8
+
 
   llu
   http://people.apache.org/~llu;>Luke Lu
@@ -842,7 +849,7 @@
  RE
  -8

-   
+

  gtcarrera9
  Li Lu

Modified: hadoop/common/site/main/publish/bylaws.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/bylaws.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/index.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/issue_tracking.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/issue_tracking.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/linkmap.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/linkmap.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/mailing_lists.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/mailing_lists.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/privacy_policy.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/privacy_policy.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/releases.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/releases.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/version_control.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/version_control.pdf?rev=1744175&r1=1744174&r2=1744175&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/who.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.html?rev=1744175&r1=1744174&r2=1744175&view=diff
==
--- hadoop/common/site/main/publish/who.html (original)
+++ hadoop/common/site/main/publish/who.html Tue May 17 01:54:03 2016
@@ -622,6 +622,16 @@ document.write("Last Published: " + docu
 
 
 
+   
+
+ 
+lei
+ http://people.apache.org/~lei;>Lei Xu
+ Cloudera
+ 
+ -8
+
+
 
 
   
@@ -1001,7 +1011,7 @@ document.write("Last Published: " + docu
 
 
 
-
+
 Emeritus Hadoop PMC Members
 
 
@@ -1016,7 +1026,7 @@ document.write("Last Published: " + docu
 
 

-
+
 Hadoop Committers
 
 Hadoop's active committers include:
@@ -1384,7 +1394,7 @@ document.write("Last Published: " + docu
  -8

 
-   
+

 
  
@@ -2182,7 +2192,7 @@ document.write("Last Published: " + docu
 
 

-
+
 

[1/2] hadoop git commit: HADOOP-12930. Dynamic subcommands for hadoop shell scripts (aw)

2016-05-16 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk 6a6e74acf -> 730bc746f


http://git-wip-us.apache.org/repos/asf/hadoop/blob/730bc746/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh
--
diff --git 
a/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh 
b/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh
new file mode 100755
index 000..b7887ba
--- /dev/null
+++ b/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if ! declare -f hadoop_subcommand_gridmix >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
+hadoop_add_subcommand "gridmix" "submit a mix of synthetic job, modeling a 
profiled from production load"
+  fi
+
+## @description  gridmix command for hadoop
+## @audience public
+## @stabilitystable
+## @replaceable  yes
+function hadoop_subcommand_gridmix
+{
+  # shellcheck disable=SC2034
+  HADOOP_CLASSNAME=org.apache.hadoop.mapred.gridmix.Gridmix
+  hadoop_add_to_classpath_tools hadoop-rumen
+  hadoop_add_to_classpath_tools hadoop-gridmix
+}
+
+fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/730bc746/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh
--
diff --git a/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh 
b/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh
new file mode 100755
index 000..d7d4022
--- /dev/null
+++ b/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if ! declare -f hadoop_subcommand_rumenfolder >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
+hadoop_add_subcommand "rumenfolder" "scale a rumen input trace"
+  fi
+
+## @description  rumenfolder command for hadoop
+## @audience public
+## @stabilitystable
+## @replaceable  yes
+function hadoop_subcommand_rumenfolder
+{
+  # shellcheck disable=SC2034
+  HADOOP_CLASSNAME=org.apache.hadoop.tools.rumen.Folder
+  hadoop_add_to_classpath_tools hadoop-rumen
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+}
+
+fi
+
+if ! declare -f hadoop_subcommand_rumentrace >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
+hadoop_add_subcommand "rumentrace" "convert logs into a rumen trace"
+  fi
+
+## @description  rumentrace command for hadoop
+## @audience public
+## @stabilitystable
+## @replaceable  yes
+function hadoop_subcommand_rumentrace
+{
+  # shellcheck disable=SC2034
+  HADOOP_CLASSNAME=org.apache.hadoop.tools.rumen.TraceBuilder
+  hadoop_add_to_classpath_tools hadoop-rumen
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+}
+
+fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/730bc746/hadoop-tools/hadoop-rumen/src/site/markdown/Rumen.md.vm
--
diff --git a/hadoop-tools/hadoop-rumen/src/site/markdown/Rumen.md.vm 
b/hadoop-tools/hadoop-rumen/src/site/markdown/Rumen.md.vm
index bee976a..34dfd0b 100644
--- 

[Hadoop Wiki] Trivial Update of "Ozone" by ArpitAgarwal

2016-05-16 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change 
notification.

The "Ozone" page has been changed by ArpitAgarwal:
https://wiki.apache.org/hadoop/Ozone?action=diff&rev1=2&rev2=3

+ <>
+ 
  = Introduction =
  Ozone is an Object Store for Hadoop that is currently under development. See 
the Ozone Apache Jira 
[[https://issues.apache.org/jira/browse/HDFS-7240|HDFS-7240]] for more details. 
Ozone is currently in a prototype phase.
  

-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[Hadoop Wiki] Trivial Update of "Ozone" by ArpitAgarwal

2016-05-16 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change 
notification.

The "Ozone" page has been changed by ArpitAgarwal:
https://wiki.apache.org/hadoop/Ozone?action=diff&rev1=1&rev2=2

  This wiki page is intended as a guide for Ozone contributors.
  
  = Compiling Ozone =
- Setup your development environment if you haven't done so already 
([[https://wiki.apache.org/hadoop/HowToContribute|Instructions here]]). Switch 
to the HDFS-7240 branch and build a Hadoop distribution as usual.
+ Setup your development environment if you haven't done so already 
([[https://wiki.apache.org/hadoop/HowToContribute|Instructions here]]). Switch 
to the HDFS-7240 branch, apply the in-progress patch for 
[[https://issues.apache.org/jira/browse/HDFS-10363|HDFS-10363]] and build a 
Hadoop distribution as usual.
  
  = Configuration =
  Create a new ozone-site.xml file in your Hadoop configuration directory and 
add the following settings for a bare minimal configuration.

-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[Hadoop Wiki] Update of "Ozone" by ArpitAgarwal

2016-05-16 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change 
notification.

The "Ozone" page has been changed by ArpitAgarwal:
https://wiki.apache.org/hadoop/Ozone?action=diff&rev1=1&rev2=2

  This wiki page is intended as a guide for Ozone contributors.
  
  = Compiling Ozone =
- Setup your development environment if you haven't done so already 
([[https://wiki.apache.org/hadoop/HowToContribute|Instructions here]]). Switch 
to the HDFS-7240 branch and build a Hadoop distribution as usual.
+ Setup your development environment if you haven't done so already 
([[https://wiki.apache.org/hadoop/HowToContribute|Instructions here]]). Switch 
to the HDFS-7240 branch, apply the in-progress patch for 
[[https://issues.apache.org/jira/browse/HDFS-10363|HDFS-10363]] and build a 
Hadoop distribution as usual.
  
  = Configuration =
  Create a new ozone-site.xml file in your Hadoop configuration directory and 
add the following settings for a bare minimal configuration.

-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HDFS-10410. RedundantEditLogInputStream.LOG is set to wrong class. (John Zhuge via lei)

2016-05-16 Thread lei
Repository: hadoop
Updated Branches:
  refs/heads/trunk 4b55642b9 -> 6a6e74acf


HDFS-10410. RedundantEditLogInputStream.LOG is set to wrong class. (John Zhuge 
via lei)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6a6e74ac
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6a6e74ac
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6a6e74ac

Branch: refs/heads/trunk
Commit: 6a6e74acf5c38a4995c4622148721cfe2f1fbdad
Parents: 4b55642
Author: Lei Xu 
Authored: Mon May 16 17:05:46 2016 -0700
Committer: Lei Xu 
Committed: Mon May 16 17:05:46 2016 -0700

--
 .../hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6a6e74ac/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
index 33be8b0..a73206b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
@@ -37,7 +37,8 @@ import com.google.common.primitives.Longs;
  * different subset of the available edits.
  */
 class RedundantEditLogInputStream extends EditLogInputStream {
-  public static final Log LOG = 
LogFactory.getLog(EditLogInputStream.class.getName());
+  public static final Log LOG = LogFactory.getLog(
+  RedundantEditLogInputStream.class.getName());
   private int curIdx;
   private long prevTxId;
   private final EditLogInputStream[] streams;


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HDFS-10410. RedundantEditLogInputStream.LOG is set to wrong class. (John Zhuge via lei)

2016-05-16 Thread lei
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 af70fc42e -> 09a613b02


HDFS-10410. RedundantEditLogInputStream.LOG is set to wrong class. (John Zhuge 
via lei)

(cherry picked from commit 6a6e74acf5c38a4995c4622148721cfe2f1fbdad)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/09a613b0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/09a613b0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/09a613b0

Branch: refs/heads/branch-2
Commit: 09a613b0235639888e671a6b991a5d18072e8018
Parents: af70fc4
Author: Lei Xu 
Authored: Mon May 16 17:05:46 2016 -0700
Committer: Lei Xu 
Committed: Mon May 16 17:07:03 2016 -0700

--
 .../hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/09a613b0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
index 33be8b0..a73206b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
@@ -37,7 +37,8 @@ import com.google.common.primitives.Longs;
  * different subset of the available edits.
  */
 class RedundantEditLogInputStream extends EditLogInputStream {
-  public static final Log LOG = 
LogFactory.getLog(EditLogInputStream.class.getName());
+  public static final Log LOG = LogFactory.getLog(
+  RedundantEditLogInputStream.class.getName());
   private int curIdx;
   private long prevTxId;
   private final EditLogInputStream[] streams;


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HADOOP-11858. [JDK8] Set minimum version of Hadoop 3 to JDK 8. Contributed by Robert Kanter.

2016-05-16 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/trunk a9a8297ca -> 4b55642b9


HADOOP-11858. [JDK8] Set minimum version of Hadoop 3 to JDK 8. Contributed by 
Robert Kanter.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4b55642b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4b55642b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4b55642b

Branch: refs/heads/trunk
Commit: 4b55642b9d836691592405805c181d12c2ed7e50
Parents: a9a8297
Author: Andrew Wang 
Authored: Mon May 16 15:45:39 2016 -0700
Committer: Andrew Wang 
Committed: Mon May 16 15:45:39 2016 -0700

--
 BUILDING.txt   | 8 
 hadoop-project/pom.xml | 4 ++--
 pom.xml| 2 +-
 3 files changed, 7 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4b55642b/BUILDING.txt
--
diff --git a/BUILDING.txt b/BUILDING.txt
index c7a91da..9d297f7 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -4,7 +4,7 @@ Build instructions for Hadoop
 Requirements:
 
 * Unix System
-* JDK 1.7+
+* JDK 1.8+
 * Maven 3.0 or later
 * Findbugs 1.3.9 (if running findbugs)
 * ProtocolBuffer 2.5.0
@@ -56,12 +56,12 @@ Known issues:
 
--
 Installing required packages for clean install of Ubuntu 14.04 LTS Desktop:
 
-* Oracle JDK 1.7 (preferred)
+* Oracle JDK 1.8 (preferred)
   $ sudo apt-get purge openjdk*
   $ sudo apt-get install software-properties-common
   $ sudo add-apt-repository ppa:webupd8team/java
   $ sudo apt-get update
-  $ sudo apt-get install oracle-java7-installer
+  $ sudo apt-get install oracle-java8-installer
 * Maven
   $ sudo apt-get -y install maven
 * Native libraries
@@ -306,7 +306,7 @@ Building on Windows
 Requirements:
 
 * Windows System
-* JDK 1.7+
+* JDK 1.8+
 * Maven 3.0 or later
 * Findbugs 1.3.9 (if running findbugs)
 * ProtocolBuffer 2.5.0

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4b55642b/hadoop-project/pom.xml
--
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 7994ded..f5fb8af 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -86,7 +86,7 @@
 6.0.44
 
 
-1.7
+1.8
 
 
 
--Xmx2048m -XX:MaxPermSize=768m 
-XX:+HeapDumpOnOutOfMemoryError
+-Xmx2048m 
-XX:+HeapDumpOnOutOfMemoryError
 2.17
 
${maven-surefire-plugin.version}
 
${maven-surefire-plugin.version}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4b55642b/pom.xml
--
diff --git a/pom.xml b/pom.xml
index d2fc6fd..9756133 100644
--- a/pom.xml
+++ b/pom.xml
@@ -144,7 +144,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xs
 [3.0.2,)
   
   
-[1.7,)
+[1.8,)
   
 
   


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HADOOP-13146. Refactor RetryInvocationHandler. Contributed by Tsz Wo Nicholas Sze.

2016-05-16 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 42b2a34ed -> af70fc42e


HADOOP-13146. Refactor RetryInvocationHandler. Contributed by Tsz Wo Nicholas 
Sze.

(cherry picked from commit a9a8297cad4122961b34265c0a31d87134a4a028)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/af70fc42
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/af70fc42
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/af70fc42

Branch: refs/heads/branch-2
Commit: af70fc42e7d0d40c05443e3036880754e7957cd2
Parents: 42b2a34
Author: Jing Zhao 
Authored: Mon May 16 15:23:36 2016 -0700
Committer: Jing Zhao 
Committed: Mon May 16 15:26:48 2016 -0700

--
 .../hadoop/io/retry/FailoverProxyProvider.java  |  11 +
 .../hadoop/io/retry/RetryInvocationHandler.java | 351 ++-
 .../apache/hadoop/io/retry/TestRetryProxy.java  |  67 ++--
 3 files changed, 211 insertions(+), 218 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/af70fc42/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
index 5acb936..c73e083 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
@@ -37,10 +37,21 @@ public interface FailoverProxyProvider extends Closeable 
{
  * provides information for debugging purposes.
  */
 public final String proxyInfo;
+
 public ProxyInfo(T proxy, String proxyInfo) {
   this.proxy = proxy;
   this.proxyInfo = proxyInfo;
 }
+
+public String getString(String methodName) {
+  return proxy.getClass().getSimpleName() + "." + methodName
+  + " over " + proxyInfo;
+}
+
+@Override
+public String toString() {
+  return proxy.getClass().getSimpleName() + " over " + proxyInfo;
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/af70fc42/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
index d57dc84..300d0c2 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
@@ -17,49 +17,137 @@
  */
 package org.apache.hadoop.io.retry;
 
-import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicLong;
-
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.retry.FailoverProxyProvider.ProxyInfo;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
-import org.apache.hadoop.ipc.Client;
+import org.apache.hadoop.ipc.*;
 import org.apache.hadoop.ipc.Client.ConnectionId;
-import org.apache.hadoop.ipc.ProtocolTranslator;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ipc.RpcConstants;
-import org.apache.hadoop.ipc.RpcInvocationHandler;
 
-import com.google.common.annotations.VisibleForTesting;
+import java.io.IOException;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
 
 /**
- * This class implements RpcInvocationHandler and supports retry on the client 
- * side.
+ * A {@link RpcInvocationHandler} which supports client side retry .
  */
 @InterfaceAudience.Private
 public class RetryInvocationHandler implements RpcInvocationHandler {
   public static final Log LOG = 
LogFactory.getLog(RetryInvocationHandler.class);
-  private final FailoverProxyProvider proxyProvider;
 
-  /**
-   * The 

hadoop git commit: HADOOP-13146. Refactor RetryInvocationHandler. Contributed by Tsz Wo Nicholas Sze.

2016-05-16 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/trunk 1217c8f6b -> a9a8297ca


HADOOP-13146. Refactor RetryInvocationHandler. Contributed by Tsz Wo Nicholas 
Sze.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a9a8297c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a9a8297c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a9a8297c

Branch: refs/heads/trunk
Commit: a9a8297cad4122961b34265c0a31d87134a4a028
Parents: 1217c8f
Author: Jing Zhao 
Authored: Mon May 16 15:23:36 2016 -0700
Committer: Jing Zhao 
Committed: Mon May 16 15:23:36 2016 -0700

--
 .../hadoop/io/retry/FailoverProxyProvider.java  |  11 +
 .../hadoop/io/retry/RetryInvocationHandler.java | 350 ++-
 .../apache/hadoop/io/retry/TestRetryProxy.java  |  67 ++--
 3 files changed, 211 insertions(+), 217 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a9a8297c/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
index 5acb936..c73e083 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/FailoverProxyProvider.java
@@ -37,10 +37,21 @@ public interface FailoverProxyProvider extends Closeable 
{
  * provides information for debugging purposes.
  */
 public final String proxyInfo;
+
 public ProxyInfo(T proxy, String proxyInfo) {
   this.proxy = proxy;
   this.proxyInfo = proxyInfo;
 }
+
+public String getString(String methodName) {
+  return proxy.getClass().getSimpleName() + "." + methodName
+  + " over " + proxyInfo;
+}
+
+@Override
+public String toString() {
+  return proxy.getClass().getSimpleName() + " over " + proxyInfo;
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a9a8297c/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
index a67c84f..300d0c2 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
@@ -17,48 +17,137 @@
  */
 package org.apache.hadoop.io.retry;
 
-import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.retry.FailoverProxyProvider.ProxyInfo;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
-import org.apache.hadoop.ipc.Client;
+import org.apache.hadoop.ipc.*;
 import org.apache.hadoop.ipc.Client.ConnectionId;
-import org.apache.hadoop.ipc.ProtocolTranslator;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ipc.RpcConstants;
-import org.apache.hadoop.ipc.RpcInvocationHandler;
 
-import com.google.common.annotations.VisibleForTesting;
+import java.io.IOException;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
 
 /**
- * This class implements RpcInvocationHandler and supports retry on the client 
- * side.
+ * A {@link RpcInvocationHandler} which supports client side retry .
  */
 @InterfaceAudience.Private
 public class RetryInvocationHandler implements RpcInvocationHandler {
   public static final Log LOG = 
LogFactory.getLog(RetryInvocationHandler.class);
-  private final FailoverProxyProvider proxyProvider;
 
-  /**
-   * The number of times the associated proxyProvider has ever been failed 
over.
-   */
-  private long proxyProviderFailoverCount = 0;

hadoop git commit: YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed by Eric Badger.

2016-05-16 Thread epayne
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 8f1739502 -> 0ee36fd51


YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed 
by Eric Badger.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0ee36fd5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0ee36fd5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0ee36fd5

Branch: refs/heads/branch-2.7
Commit: 0ee36fd51190ed10022b8d4c9098f6fecc0679af
Parents: 8f17395
Author: Eric Payne 
Authored: Mon May 16 21:12:26 2016 +
Committer: Eric Payne 
Committed: Mon May 16 21:12:26 2016 +

--
 .../hadoop/yarn/server/resourcemanager/TestFifoScheduler.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0ee36fd5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java
index 2327af4..a128d5f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java
@@ -584,7 +584,8 @@ public class TestFifoScheduler {
 waitCount = 0;
 while (waitCount++ != 20) {
   report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
-  if (report_nm1.getAvailableResource().getMemory() != 0) {
+  if (null != report_nm1 &&
+  report_nm1.getAvailableResource().getMemory() != 0) {
 break;
   }
   LOG.info("Waiting for RMNodeResourceUpdateEvent to be handled... Tried "


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed by Eric Badger. (cherry picked from commit 1217c8f6b49e3afd8ca008ffddcf6615f0accfc5)

2016-05-16 Thread epayne
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 28a2f28e4 -> a552932b3


YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed 
by Eric Badger.
(cherry picked from commit 1217c8f6b49e3afd8ca008ffddcf6615f0accfc5)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a552932b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a552932b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a552932b

Branch: refs/heads/branch-2.8
Commit: a552932b3881f3ba1244d92dae917e36bb515fd5
Parents: 28a2f28
Author: Eric Payne 
Authored: Mon May 16 20:27:07 2016 +
Committer: Eric Payne 
Committed: Mon May 16 20:44:41 2016 +

--
 .../server/resourcemanager/scheduler/fifo/TestFifoScheduler.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a552932b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
index bbcc800..7b06319 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
@@ -1151,7 +1151,8 @@ public class TestFifoScheduler {
 waitCount = 0;
 while (waitCount++ != 20) {
   report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
-  if (report_nm1.getAvailableResource().getMemory() != 0) {
+  if (null != report_nm1 &&
+  report_nm1.getAvailableResource().getMemory() != 0) {
 break;
   }
   LOG.info("Waiting for RMNodeResourceUpdateEvent to be handled... Tried "


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed by Eric Badger. (cherry picked from commit 1217c8f6b49e3afd8ca008ffddcf6615f0accfc5)

2016-05-16 Thread epayne
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 bb41547fb -> 42b2a34ed


YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed 
by Eric Badger.
(cherry picked from commit 1217c8f6b49e3afd8ca008ffddcf6615f0accfc5)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/42b2a34e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/42b2a34e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/42b2a34e

Branch: refs/heads/branch-2
Commit: 42b2a34ed8979ad8712d2be4be2f5103c32d4ae2
Parents: bb41547
Author: Eric Payne 
Authored: Mon May 16 20:27:07 2016 +
Committer: Eric Payne 
Committed: Mon May 16 20:38:10 2016 +

--
 .../server/resourcemanager/scheduler/fifo/TestFifoScheduler.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/42b2a34e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
index 90b9969..02c60cf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
@@ -1140,7 +1140,8 @@ public class TestFifoScheduler {
 waitCount = 0;
 while (waitCount++ != 20) {
   report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
-  if (report_nm1.getAvailableResource().getMemory() != 0) {
+  if (null != report_nm1 &&
+  report_nm1.getAvailableResource().getMemory() != 0) {
 break;
   }
   LOG.info("Waiting for RMNodeResourceUpdateEvent to be handled... Tried "


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed by Eric Badger.

2016-05-16 Thread epayne
Repository: hadoop
Updated Branches:
  refs/heads/trunk 576e2d127 -> 1217c8f6b


YARN-5069. TestFifoScheduler.testResourceOverCommit race condition. Contributed 
by Eric Badger.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1217c8f6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1217c8f6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1217c8f6

Branch: refs/heads/trunk
Commit: 1217c8f6b49e3afd8ca008ffddcf6615f0accfc5
Parents: 576e2d1
Author: Eric Payne 
Authored: Mon May 16 20:27:07 2016 +
Committer: Eric Payne 
Committed: Mon May 16 20:28:04 2016 +

--
 .../server/resourcemanager/scheduler/fifo/TestFifoScheduler.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1217c8f6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
index 90b9969..02c60cf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
@@ -1140,7 +1140,8 @@ public class TestFifoScheduler {
 waitCount = 0;
 while (waitCount++ != 20) {
   report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
-  if (report_nm1.getAvailableResource().getMemory() != 0) {
+  if (null != report_nm1 &&
+  report_nm1.getAvailableResource().getMemory() != 0) {
 break;
   }
   LOG.info("Waiting for RMNodeResourceUpdateEvent to be handled... Tried "


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HADOOP-13149. Windows distro build fails on dist-copynativelibs. Contributed by Chris Nauroth.

2016-05-16 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/trunk b9685e85d -> 576e2d127


HADOOP-13149. Windows distro build fails on dist-copynativelibs. Contributed by 
Chris Nauroth.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/576e2d12
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/576e2d12
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/576e2d12

Branch: refs/heads/trunk
Commit: 576e2d1278a584ef8e804832c852c3f8323bb64a
Parents: b9685e8
Author: Chris Nauroth 
Authored: Mon May 16 12:05:02 2016 -0700
Committer: Chris Nauroth 
Committed: Mon May 16 12:05:02 2016 -0700

--
 hadoop-project-dist/pom.xml | 33 +
 1 file changed, 17 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/576e2d12/hadoop-project-dist/pom.xml
--
diff --git a/hadoop-project-dist/pom.xml b/hadoop-project-dist/pom.xml
index a554b42..2cccbbe 100644
--- a/hadoop-project-dist/pom.xml
+++ b/hadoop-project-dist/pom.xml
@@ -325,22 +325,23 @@
   exec
 
 
-
${project.parent.basedir}/../dev-support/bin/dist-copynativelibs
-
${project.build.directory}
-false
-
-   --version=${project.version}
-   
--builddir=${project.build.directory}
-   --artifactid=${project.artifactId}
-   --isalbundle=${bundle.isal}
-   --isallib=${isal.lib}
-   --openssllib=${openssl.lib}
-   
--opensslbinbundle=${bundle.openssl.in.bin}
-   
--openssllibbundle=${bundle.openssl}
-   
--snappybinbundle=${bundle.snappy.in.bin}
-   --snappylib=${snappy.lib}
-   --snappylibbundle=${bundle.snappy}
-
+  ${shell-executable}
+  
${project.build.directory}
+  false
+  
+
${project.parent.basedir}/../dev-support/bin/dist-copynativelibs
+--version=${project.version}
+--builddir=${project.build.directory}
+--artifactid=${project.artifactId}
+--isalbundle=${bundle.isal}
+--isallib=${isal.lib}
+--openssllib=${openssl.lib}
+
--opensslbinbundle=${bundle.openssl.in.bin}
+--openssllibbundle=${bundle.openssl}
+
--snappybinbundle=${bundle.snappy.in.bin}
+--snappylib=${snappy.lib}
+--snappylibbundle=${bundle.snappy}
+  
 
   
 


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[3/3] hadoop git commit: HADOOP-13148. TestDistCpViewFs to include IOExceptions in test error reports. Contributed by Steve Loughran.

2016-05-16 Thread cnauroth
HADOOP-13148. TestDistCpViewFs to include IOExceptions in test error reports. 
Contributed by Steve Loughran.

(cherry picked from commit b9685e85d59e69e5dd64802fa9025dad1b315be5)
(cherry picked from commit bb41547fb42b9dd2b80ec2a7286db6d35fd251b5)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/28a2f28e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/28a2f28e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/28a2f28e

Branch: refs/heads/branch-2.8
Commit: 28a2f28e422247f7595fc49dc2c0913646006dec
Parents: 70faa87
Author: Chris Nauroth 
Authored: Mon May 16 11:53:17 2016 -0700
Committer: Chris Nauroth 
Committed: Mon May 16 11:53:33 2016 -0700

--
 .../apache/hadoop/tools/TestDistCpViewFs.java   | 93 +---
 1 file changed, 22 insertions(+), 71 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/28a2f28e/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
index a6939a2..5511e09 100644
--- 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
+++ 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
@@ -73,13 +73,12 @@ public class TestDistCpViewFs {
   }
 
   @Test
-  public void testSingleFileMissingTarget() {
+  public void testSingleFileMissingTarget() throws IOException {
 caseSingleFileMissingTarget(false);
 caseSingleFileMissingTarget(true);
   }
 
-
-  private void caseSingleFileMissingTarget(boolean sync) {
+  private void caseSingleFileMissingTarget(boolean sync) throws IOException{
 
 try {
   addEntries(listFile, "singlefile1/file1");
@@ -88,21 +87,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1);
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleFileTargetFile() {
+  public void testSingleFileTargetFile() throws IOException{
 caseSingleFileTargetFile(false);
 caseSingleFileTargetFile(true);
   }
 
-  private void caseSingleFileTargetFile(boolean sync) {
+  private void caseSingleFileTargetFile(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singlefile1/file1");
@@ -111,21 +107,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1);
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleFileTargetDir() {
+  public void testSingleFileTargetDir() throws IOException {
 caseSingleFileTargetDir(false);
 caseSingleFileTargetDir(true);
   }
 
-  private void caseSingleFileTargetDir(boolean sync) {
+  private void caseSingleFileTargetDir(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singlefile2/file2");
@@ -135,21 +128,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, true, sync);
 
   checkResult(target, 1, "file2");
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleDirTargetMissing() {
+  public void testSingleDirTargetMissing() throws IOException{
 caseSingleDirTargetMissing(false);
 caseSingleDirTargetMissing(true);
   }
 
-  private void caseSingleDirTargetMissing(boolean sync) {
+  private void caseSingleDirTargetMissing(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singledir");
@@ -158,16 +148,13 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1, "dir1");
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleDirTargetPresent() {
+  public void testSingleDirTargetPresent() throws IOException{
 
 try {
   addEntries(listFile, "singledir");
@@ -177,16 +164,13 @@ public class TestDistCpViewFs {
   runTest(listFile, target, true, false);
 
   

[1/3] hadoop git commit: HADOOP-13148. TestDistCpViewFs to include IOExceptions in test error reports. Contributed by Steve Loughran.

2016-05-16 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 575c05635 -> bb41547fb
  refs/heads/branch-2.8 70faa87cc -> 28a2f28e4
  refs/heads/trunk 81effb7dc -> b9685e85d


HADOOP-13148. TestDistCpViewFs to include IOExceptions in test error reports. 
Contributed by Steve Loughran.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b9685e85
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b9685e85
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b9685e85

Branch: refs/heads/trunk
Commit: b9685e85d59e69e5dd64802fa9025dad1b315be5
Parents: 81effb7
Author: Chris Nauroth 
Authored: Mon May 16 11:53:17 2016 -0700
Committer: Chris Nauroth 
Committed: Mon May 16 11:53:17 2016 -0700

--
 .../apache/hadoop/tools/TestDistCpViewFs.java   | 93 +---
 1 file changed, 22 insertions(+), 71 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b9685e85/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
index a6939a2..5511e09 100644
--- 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
+++ 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
@@ -73,13 +73,12 @@ public class TestDistCpViewFs {
   }
 
   @Test
-  public void testSingleFileMissingTarget() {
+  public void testSingleFileMissingTarget() throws IOException {
 caseSingleFileMissingTarget(false);
 caseSingleFileMissingTarget(true);
   }
 
-
-  private void caseSingleFileMissingTarget(boolean sync) {
+  private void caseSingleFileMissingTarget(boolean sync) throws IOException{
 
 try {
   addEntries(listFile, "singlefile1/file1");
@@ -88,21 +87,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1);
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleFileTargetFile() {
+  public void testSingleFileTargetFile() throws IOException{
 caseSingleFileTargetFile(false);
 caseSingleFileTargetFile(true);
   }
 
-  private void caseSingleFileTargetFile(boolean sync) {
+  private void caseSingleFileTargetFile(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singlefile1/file1");
@@ -111,21 +107,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1);
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleFileTargetDir() {
+  public void testSingleFileTargetDir() throws IOException {
 caseSingleFileTargetDir(false);
 caseSingleFileTargetDir(true);
   }
 
-  private void caseSingleFileTargetDir(boolean sync) {
+  private void caseSingleFileTargetDir(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singlefile2/file2");
@@ -135,21 +128,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, true, sync);
 
   checkResult(target, 1, "file2");
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleDirTargetMissing() {
+  public void testSingleDirTargetMissing() throws IOException{
 caseSingleDirTargetMissing(false);
 caseSingleDirTargetMissing(true);
   }
 
-  private void caseSingleDirTargetMissing(boolean sync) {
+  private void caseSingleDirTargetMissing(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singledir");
@@ -158,16 +148,13 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1, "dir1");
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleDirTargetPresent() {
+  public void testSingleDirTargetPresent() throws IOException{
 
 try {
   addEntries(listFile, "singledir");
@@ -177,16 +164,13 @@ public class TestDistCpViewFs {
   runTest(listFile, 

[2/3] hadoop git commit: HADOOP-13148. TestDistCpViewFs to include IOExceptions in test error reports. Contributed by Steve Loughran.

2016-05-16 Thread cnauroth
HADOOP-13148. TestDistCpViewFs to include IOExceptions in test error reports. 
Contributed by Steve Loughran.

(cherry picked from commit b9685e85d59e69e5dd64802fa9025dad1b315be5)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bb41547f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bb41547f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bb41547f

Branch: refs/heads/branch-2
Commit: bb41547fb42b9dd2b80ec2a7286db6d35fd251b5
Parents: 575c056
Author: Chris Nauroth 
Authored: Mon May 16 11:53:17 2016 -0700
Committer: Chris Nauroth 
Committed: Mon May 16 11:53:25 2016 -0700

--
 .../apache/hadoop/tools/TestDistCpViewFs.java   | 93 +---
 1 file changed, 22 insertions(+), 71 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bb41547f/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
index a6939a2..5511e09 100644
--- 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
+++ 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
@@ -73,13 +73,12 @@ public class TestDistCpViewFs {
   }
 
   @Test
-  public void testSingleFileMissingTarget() {
+  public void testSingleFileMissingTarget() throws IOException {
 caseSingleFileMissingTarget(false);
 caseSingleFileMissingTarget(true);
   }
 
-
-  private void caseSingleFileMissingTarget(boolean sync) {
+  private void caseSingleFileMissingTarget(boolean sync) throws IOException{
 
 try {
   addEntries(listFile, "singlefile1/file1");
@@ -88,21 +87,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1);
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleFileTargetFile() {
+  public void testSingleFileTargetFile() throws IOException{
 caseSingleFileTargetFile(false);
 caseSingleFileTargetFile(true);
   }
 
-  private void caseSingleFileTargetFile(boolean sync) {
+  private void caseSingleFileTargetFile(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singlefile1/file1");
@@ -111,21 +107,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1);
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleFileTargetDir() {
+  public void testSingleFileTargetDir() throws IOException {
 caseSingleFileTargetDir(false);
 caseSingleFileTargetDir(true);
   }
 
-  private void caseSingleFileTargetDir(boolean sync) {
+  private void caseSingleFileTargetDir(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singlefile2/file2");
@@ -135,21 +128,18 @@ public class TestDistCpViewFs {
   runTest(listFile, target, true, sync);
 
   checkResult(target, 1, "file2");
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleDirTargetMissing() {
+  public void testSingleDirTargetMissing() throws IOException{
 caseSingleDirTargetMissing(false);
 caseSingleDirTargetMissing(true);
   }
 
-  private void caseSingleDirTargetMissing(boolean sync) {
+  private void caseSingleDirTargetMissing(boolean sync) throws IOException {
 
 try {
   addEntries(listFile, "singledir");
@@ -158,16 +148,13 @@ public class TestDistCpViewFs {
   runTest(listFile, target, false, sync);
 
   checkResult(target, 1, "dir1");
-} catch (IOException e) {
-  LOG.error("Exception encountered while testing distcp", e);
-  Assert.fail("distcp failure");
 } finally {
   TestDistCpUtils.delete(fs, root);
 }
   }
 
   @Test
-  public void testSingleDirTargetPresent() {
+  public void testSingleDirTargetPresent() throws IOException{
 
 try {
   addEntries(listFile, "singledir");
@@ -177,16 +164,13 @@ public class TestDistCpViewFs {
   runTest(listFile, target, true, false);
 
   checkResult(target, 1, "singledir/dir1");
-} catch (IOException e) 

hadoop git commit: YARN-3362. Add node label usage in RM CapacityScheduler web UI. Contributed by Eric Payne.

2016-05-16 Thread naganarasimha_gr
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 7ded648ae -> 8f1739502


YARN-3362. Add node label usage in RM CapacityScheduler web UI. Contributed by 
Eric Payne.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8f173950
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8f173950
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8f173950

Branch: refs/heads/branch-2.7
Commit: 8f173950299ac3ff31ffa60ce96cdea23d0bc9e2
Parents: 7ded648
Author: Naganarasimha 
Authored: Tue May 17 02:39:50 2016 +0800
Committer: Naganarasimha 
Committed: Tue May 17 02:39:50 2016 +0800

--
 .../scheduler/capacity/AbstractCSQueue.java |  23 +++
 .../webapp/CapacitySchedulerPage.java   | 170 +++
 .../resourcemanager/webapp/RMWebServices.java   |   6 +-
 .../webapp/dao/CapacitySchedulerInfo.java   |  40 -
 .../dao/CapacitySchedulerLeafQueueInfo.java |   8 +-
 .../webapp/dao/CapacitySchedulerQueueInfo.java  |  33 ++--
 .../capacity/TestCapacityScheduler.java |   9 +-
 7 files changed, 233 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8f173950/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java
index 3679de6..fcf6c85 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java
@@ -504,4 +504,27 @@ public abstract class AbstractCSQueue implements CSQueue {
 // non-empty
 return false;
   }
+
+  /**
+   * @param nodePartition node label to check for accessibility
+   * @return true if queue can access nodes with specified label, false if not.
+   */
+  public final boolean accessibleToPartition(final String nodePartition) {
+// if queue's label is *, it can access any node
+if (accessibleLabels != null
+&& accessibleLabels.contains(RMNodeLabelsManager.ANY)) {
+  return true;
+}
+// any queue can access a node without a label
+if (nodePartition == null
+|| nodePartition.equals(RMNodeLabelsManager.NO_LABEL)) {
+  return true;
+}
+// a queue can access a node only if it contains any label of the node
+if (accessibleLabels != null && accessibleLabels.contains(nodePartition)) {
+  return true;
+}
+// sorry, you cannot access
+return false;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8f173950/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
index 2fcc12a..1f33ab6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
@@ -21,11 +21,15 @@ package 
org.apache.hadoop.yarn.server.resourcemanager.webapp;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.nodelabels.NodeLabel;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import 
org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
 

hadoop git commit: YARN-4325. Nodemanager log handlers fail to send finished/failed events in some cases. Contributed by Junping Du (cherry picked from commit 81effb7dcde2b31423438d6f1b8b8204d4ca05b3)

2016-05-16 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 29ed65ff9 -> 70faa87cc


YARN-4325. Nodemanager log handlers fail to send finished/failed events in some 
cases. Contributed by Junping Du
(cherry picked from commit 81effb7dcde2b31423438d6f1b8b8204d4ca05b3)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/70faa87c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/70faa87c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/70faa87c

Branch: refs/heads/branch-2.8
Commit: 70faa87ccfcd624611a241748eb18f44e1ce3a9d
Parents: 29ed65f
Author: Jason Lowe 
Authored: Mon May 16 15:40:23 2016 +
Committer: Jason Lowe 
Committed: Mon May 16 15:43:42 2016 +

--
 .../application/ApplicationImpl.java|  8 +-
 .../logaggregation/AppLogAggregatorImpl.java|  4 +
 .../logaggregation/LogAggregationService.java   |  5 +-
 .../loghandler/NonAggregatingLogHandler.java|  4 +
 .../TestContainerManagerRecovery.java   | 84 
 .../TestNonAggregatingLogHandler.java   | 58 +-
 6 files changed, 155 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/70faa87c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
index fbc8453..efa258a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
@@ -207,18 +207,18 @@ public class ApplicationImpl implements Application {
   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
   ApplicationEventType.APPLICATION_INITED,
   ApplicationEventType.FINISH_APPLICATION))
-   
+
// Transitions from FINISHED state
.addTransition(ApplicationState.FINISHED,
ApplicationState.FINISHED,
-   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
+   EnumSet.of(
+   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
+   ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED),
new AppLogsAggregatedTransition())
.addTransition(ApplicationState.FINISHED, ApplicationState.FINISHED,
EnumSet.of(
   ApplicationEventType.APPLICATION_LOG_HANDLING_INITED,
-  ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED,
   ApplicationEventType.FINISH_APPLICATION))
-   
// create the topology tables
.installTopology();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/70faa87c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
index fed4a3b..32b0934 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
@@ -501,6 +501,7 @@ public class AppLogAggregatorImpl implements 
AppLogAggregator {
 }
   }
 
+  @SuppressWarnings("unchecked")
   @Override
   public void run() {
 try {
@@ -513,6 

hadoop git commit: YARN-4325. Nodemanager log handlers fail to send finished/failed events in some cases. Contributed by Junping Du (cherry picked from commit 81effb7dcde2b31423438d6f1b8b8204d4ca05b3)

2016-05-16 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 a37b3694e -> 575c05635


YARN-4325. Nodemanager log handlers fail to send finished/failed events in some 
cases. Contributed by Junping Du
(cherry picked from commit 81effb7dcde2b31423438d6f1b8b8204d4ca05b3)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/575c0563
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/575c0563
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/575c0563

Branch: refs/heads/branch-2
Commit: 575c05635704868c5264a3b0e5bd203cca395d0f
Parents: a37b369
Author: Jason Lowe 
Authored: Mon May 16 15:40:23 2016 +
Committer: Jason Lowe 
Committed: Mon May 16 15:43:20 2016 +

--
 .../application/ApplicationImpl.java|  8 +-
 .../logaggregation/AppLogAggregatorImpl.java|  4 +
 .../logaggregation/LogAggregationService.java   |  5 +-
 .../loghandler/NonAggregatingLogHandler.java|  4 +
 .../TestContainerManagerRecovery.java   | 84 
 .../TestNonAggregatingLogHandler.java   | 58 +-
 6 files changed, 155 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/575c0563/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
index fbc8453..efa258a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
@@ -207,18 +207,18 @@ public class ApplicationImpl implements Application {
   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
   ApplicationEventType.APPLICATION_INITED,
   ApplicationEventType.FINISH_APPLICATION))
-   
+
// Transitions from FINISHED state
.addTransition(ApplicationState.FINISHED,
ApplicationState.FINISHED,
-   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
+   EnumSet.of(
+   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
+   ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED),
new AppLogsAggregatedTransition())
.addTransition(ApplicationState.FINISHED, ApplicationState.FINISHED,
EnumSet.of(
   ApplicationEventType.APPLICATION_LOG_HANDLING_INITED,
-  ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED,
   ApplicationEventType.FINISH_APPLICATION))
-   
// create the topology tables
.installTopology();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/575c0563/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
index fed4a3b..32b0934 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
@@ -501,6 +501,7 @@ public class AppLogAggregatorImpl implements 
AppLogAggregator {
 }
   }
 
+  @SuppressWarnings("unchecked")
   @Override
   public void run() {
 try {
@@ -513,6 

hadoop git commit: YARN-4325. Nodemanager log handlers fail to send finished/failed events in some cases. Contributed by Junping Du

2016-05-16 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/trunk ad9441122 -> 81effb7dc


YARN-4325. Nodemanager log handlers fail to send finished/failed events in some 
cases. Contributed by Junping Du


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/81effb7d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/81effb7d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/81effb7d

Branch: refs/heads/trunk
Commit: 81effb7dcde2b31423438d6f1b8b8204d4ca05b3
Parents: ad94411
Author: Jason Lowe 
Authored: Mon May 16 15:40:23 2016 +
Committer: Jason Lowe 
Committed: Mon May 16 15:40:23 2016 +

--
 .../application/ApplicationImpl.java|  8 +-
 .../logaggregation/AppLogAggregatorImpl.java|  4 +
 .../logaggregation/LogAggregationService.java   |  5 +-
 .../loghandler/NonAggregatingLogHandler.java|  4 +
 .../TestContainerManagerRecovery.java   | 84 
 .../TestNonAggregatingLogHandler.java   | 58 +-
 6 files changed, 155 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/81effb7d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
index fbc8453..efa258a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
@@ -207,18 +207,18 @@ public class ApplicationImpl implements Application {
   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
   ApplicationEventType.APPLICATION_INITED,
   ApplicationEventType.FINISH_APPLICATION))
-   
+
// Transitions from FINISHED state
.addTransition(ApplicationState.FINISHED,
ApplicationState.FINISHED,
-   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
+   EnumSet.of(
+   ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED,
+   ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED),
new AppLogsAggregatedTransition())
.addTransition(ApplicationState.FINISHED, ApplicationState.FINISHED,
EnumSet.of(
   ApplicationEventType.APPLICATION_LOG_HANDLING_INITED,
-  ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED,
   ApplicationEventType.FINISH_APPLICATION))
-   
// create the topology tables
.installTopology();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/81effb7d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
index fed4a3b..32b0934 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
@@ -501,6 +501,7 @@ public class AppLogAggregatorImpl implements 
AppLogAggregator {
 }
   }
 
+  @SuppressWarnings("unchecked")
   @Override
   public void run() {
 try {
@@ -513,6 +514,9 @@ public class AppLogAggregatorImpl implements 
AppLogAggregator {
 

hadoop git commit: YARN-5068. Expose scheduler queue to application master. (Harish Jaiprakash via rohithsharmaks)

2016-05-16 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 413f3dabb -> a37b3694e


YARN-5068. Expose scheduler queue to application master. (Harish Jaiprakash via 
rohithsharmaks)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a37b3694
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a37b3694
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a37b3694

Branch: refs/heads/branch-2
Commit: a37b3694ead2ab4e8753e68213f31868b68376b8
Parents: 413f3da
Author: Rohith Sharma K S 
Authored: Mon May 16 15:25:15 2016 +0530
Committer: Rohith Sharma K S 
Committed: Mon May 16 15:25:15 2016 +0530

--
 .../hadoop/yarn/api/ApplicationConstants.java |  7 +++
 .../resourcemanager/amlauncher/AMLauncher.java| 18 --
 .../TestApplicationMasterLauncher.java|  5 +
 3 files changed, 28 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a37b3694/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
index b2d765a..ad526d6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
@@ -171,6 +171,13 @@ public interface ApplicationConstants {
 LD_LIBRARY_PATH("LD_LIBRARY_PATH"),
 
 /**
+ * $YARN_RESOURCEMANAGER_APPLICATION_QUEUE
+ * The queue into which the app was submitted/launched.
+ */
+YARN_RESOURCEMANAGER_APPLICATION_QUEUE(
+"YARN_RESOURCEMANAGER_APPLICATION_QUEUE"),
+
+/**
  * $HADOOP_CONF_DIR
  * Final, non-modifiable.
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a37b3694/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
index d2fa587..4c840e7 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.client.NMProxy;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -190,13 +191,26 @@ public class AMLauncher implements Runnable {
 + " : "
 + StringUtils.arrayToString(container.getCommands().toArray(
 new String[0])));
-
+
+// Populate the current queue name in the environment variable.
+setupQueueNameEnv(container, applicationMasterContext);
+
 // Finalize the container
 setupTokens(container, containerID);
-
+
 return container;
   }
 
+  private void setupQueueNameEnv(ContainerLaunchContext container,
+  ApplicationSubmissionContext applicationMasterContext) {
+String queueName = applicationMasterContext.getQueue();
+if (queueName == null) {
+  queueName = YarnConfiguration.DEFAULT_QUEUE_NAME;
+}
+container.getEnvironment().put(ApplicationConstants.Environment
+.YARN_RESOURCEMANAGER_APPLICATION_QUEUE.key(), queueName);
+  }
+
   @Private
   @VisibleForTesting
   protected void setupTokens(

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a37b3694/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java

hadoop git commit: HDFS-8449. Add tasks count metrics to datanode for ECWorker. Contributed by Bo Li.

2016-05-16 Thread drankye
Repository: hadoop
Updated Branches:
  refs/heads/trunk 45788204a -> ad9441122


HDFS-8449. Add tasks count metrics to datanode for ECWorker. Contributed by Bo 
Li.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ad944112
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ad944112
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ad944112

Branch: refs/heads/trunk
Commit: ad9441122f31547fcab29f50e64d52a8895906b6
Parents: 4578820
Author: Kai Zheng 
Authored: Sun May 15 23:39:09 2016 -0700
Committer: Kai Zheng 
Committed: Sun May 15 23:39:09 2016 -0700

--
 .../erasurecode/StripedReconstructor.java   |   4 +-
 .../datanode/metrics/DataNodeMetrics.java   |  14 ++
 .../apache/hadoop/hdfs/StripedFileTestUtil.java |  32 +++-
 .../hadoop/hdfs/TestReconstructStripedFile.java |  27 +---
 .../TestDataNodeErasureCodingMetrics.java   | 153 +++
 5 files changed, 204 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ad944112/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
index 1b59b22..c80bf96 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructor.java
@@ -179,11 +179,11 @@ class StripedReconstructor implements Runnable {
   // block replication.
 } catch (Throwable e) {
   LOG.warn("Failed to reconstruct striped block: {}", blockGroup, e);
+  datanode.getMetrics().incrECFailedReconstructionTasks();
 } finally {
   datanode.decrementXmitsInProgress();
-
+  datanode.getMetrics().incrECReconstructionTasks();
   stripedReader.close();
-
   stripedWriter.close();
 }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ad944112/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
index 085762b..3d504d6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
@@ -126,6 +126,11 @@ public class DataNodeMetrics {
   @Metric MutableRate sendDataPacketTransferNanos;
   final MutableQuantiles[] sendDataPacketTransferNanosQuantiles;
 
+  @Metric("Count of erasure coding reconstruction tasks")
+  MutableCounterLong ecReconstructionTasks;
+  @Metric("Count of erasure coding failed reconstruction tasks")
+  MutableCounterLong ecFailedReconstructionTasks;
+
   final MetricsRegistry registry = new MetricsRegistry("datanode");
   final String name;
   JvmMetrics jvmMetrics = null;
@@ -415,4 +420,13 @@ public class DataNodeMetrics {
   q.add(latencyMs);
 }
   }
+
+  public void incrECReconstructionTasks() {
+ecReconstructionTasks.incr();
+  }
+
+  public void incrECFailedReconstructionTasks() {
+ecFailedReconstructionTasks.incr();
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ad944112/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
index 3c58133..6d3 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
@@ -74,7 +74,7 @@ public class StripedFileTestUtil {
   static int numDNs = NUM_DATA_BLOCKS + NUM_PARITY_BLOCKS + 2;
   static int BLOCK_GROUP_SIZE = blockSize * NUM_DATA_BLOCKS;
 
-  static byte[] generateBytes(int cnt) {
+  public static byte[] generateBytes(int cnt) {
 byte[] bytes = new