HDFS-9438. TestPipelinesFailover assumes Linux ifconfig. (John Zhuge via Yongjun Zhang)
(cherry picked from commit 8176ea7dc694841a993f2bfc30669fe22f9ec1d2)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/48b294c5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/48b294c5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/48b294c5

Branch: refs/heads/branch-2
Commit: 48b294c58ef8b688c2a1973d870e83ba4b003cf5
Parents: 7ac8d8f
Author: Yongjun Zhang <yzh...@cloudera.com>
Authored: Wed Nov 25 07:40:16 2015 -0800
Committer: Yongjun Zhang <yzh...@cloudera.com>
Committed: Wed Nov 25 08:50:02 2015 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../namenode/ha/TestPipelinesFailover.java  | 45 ++++++++++----------
 2 files changed, 26 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/48b294c5/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 4ff439b..8387571 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -817,6 +817,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8807. dfs.datanode.data.dir does not handle spaces between
     storageType and URI correctly. (Anu Engineer via szetszwo)
 
+    HDFS-9438. TestPipelinesFailover assumes Linux ifconfig.
+    (John Zhuge via Yongjun Zhang)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/48b294c5/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
index 76a62ff..47b3817 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
 import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.log4j.Level;
 import org.junit.Test;
@@ -419,28 +420,28 @@ public class TestPipelinesFailover {
     // The following section of code is to help debug HDFS-6694 about
     // this test that fails from time to time due to "too many open files".
     //
-    String[] scmd = new String[] {"/bin/sh", "-c", "ulimit -a"};
-    ShellCommandExecutor sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-
-    System.out.println("HDFS-6694 Debug Data BEGIN===");
-    System.out.println("'ulimit -a' output:\n" + sce.getOutput());
-
-    scmd = new String[] {"hostname"};
-    sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-    System.out.println("'hostname' output:\n" + sce.getOutput());
-
-    scmd = new String[] {"ifconfig"};
-    sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-    System.out.println("'ifconfig' output:\n" + sce.getOutput());
-
-    scmd = new String[] {"whoami"};
-    sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-    System.out.println("'whoami' output:\n" + sce.getOutput());
-    System.out.println("===HDFS-6694 Debug Data END");
+
+    // Only collect debug data on these OSes.
+    if (Shell.LINUX || Shell.SOLARIS || Shell.MAC) {
+      System.out.println("HDFS-6694 Debug Data BEGIN===");
+
+      String[] scmd = new String[] {"/bin/sh", "-c", "ulimit -a"};
+      ShellCommandExecutor sce = new ShellCommandExecutor(scmd);
+      sce.execute();
+      System.out.println("'ulimit -a' output:\n" + sce.getOutput());
+
+      scmd = new String[] {"hostname"};
+      sce = new ShellCommandExecutor(scmd);
+      sce.execute();
+      System.out.println("'hostname' output:\n" + sce.getOutput());
+
+      scmd = new String[] {"ifconfig", "-a"};
+      sce = new ShellCommandExecutor(scmd);
+      sce.execute();
+      System.out.println("'ifconfig' output:\n" + sce.getOutput());
+
+      System.out.println("===HDFS-6694 Debug Data END");
+    }
 
     HAStressTestHarness harness = new HAStressTestHarness();
     // Disable permissions so that another user can recover the lease.
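
For readers following along, here is a minimal standalone sketch of the
pattern this patch applies: gate Unix-specific shell commands behind the
platform flags in org.apache.hadoop.util.Shell before running them through
ShellCommandExecutor, so the code skips rather than fails on platforms
(such as Windows) where the command is not assumed to exist. The class
name PlatformGuardedDiagnostics and the main() wrapper are illustrative
only, not part of the patch.

import java.io.IOException;

import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

// Illustrative sketch (not from the patch): run a Unix-only diagnostic
// command on platforms known to provide it, and skip it elsewhere.
public class PlatformGuardedDiagnostics {
  public static void main(String[] args) throws IOException {
    // Shell.LINUX, Shell.SOLARIS, and Shell.MAC are static booleans in
    // org.apache.hadoop.util.Shell that identify the current OS.
    if (Shell.LINUX || Shell.SOLARIS || Shell.MAC) {
      ShellCommandExecutor sce =
          new ShellCommandExecutor(new String[] {"ifconfig", "-a"});
      sce.execute();  // runs the command and buffers its stdout
      System.out.println("'ifconfig -a' output:\n" + sce.getOutput());
    } else {
      // ifconfig is not assumed on this platform; skip the diagnostic
      // instead of throwing and failing the caller.
      System.out.println("Skipping ifconfig diagnostics on this platform.");
    }
  }
}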