HDFS-9438. TestPipelinesFailover assumes Linux ifconfig. (John Zhuge via Yongjun Zhang)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8176ea7d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8176ea7d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8176ea7d

Branch: refs/heads/HDFS-1312
Commit: 8176ea7dc694841a993f2bfc30669fe22f9ec1d2
Parents: 177975e
Author: Yongjun Zhang <yzh...@cloudera.com>
Authored: Wed Nov 25 07:40:16 2015 -0800
Committer: Yongjun Zhang <yzh...@cloudera.com>
Committed: Wed Nov 25 07:40:16 2015 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 ++
 .../namenode/ha/TestPipelinesFailover.java      | 45 ++++++++++----------
 2 files changed, 26 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8176ea7d/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index db49e54..ce0e74f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1684,6 +1684,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8807. dfs.datanode.data.dir does not handle spaces between storageType
     and URI correctly. (Anu Engineer via szetszwo)
 
+    HDFS-9438. TestPipelinesFailover assumes Linux ifconfig.
+    (John Zhuge via Yongjun Zhang)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8176ea7d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
index 3da37f5..f1858a7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
@@ -56,6 +56,7 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
 import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.log4j.Level;
 import org.junit.Test;
@@ -429,28 +430,28 @@ public class TestPipelinesFailover {
     // The following section of code is to help debug HDFS-6694 about
     // this test that fails from time to time due to "too many open files".
     //
-    String[] scmd = new String[] {"/bin/sh", "-c", "ulimit -a"};
-    ShellCommandExecutor sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-
-    System.out.println("HDFS-6694 Debug Data BEGIN===");
-    System.out.println("'ulimit -a' output:\n" + sce.getOutput());
-
-    scmd = new String[] {"hostname"};
-    sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-    System.out.println("'hostname' output:\n" + sce.getOutput());
-
-    scmd = new String[] {"ifconfig"};
-    sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-    System.out.println("'ifconfig' output:\n" + sce.getOutput());
-
-    scmd = new String[] {"whoami"};
-    sce = new ShellCommandExecutor(scmd);
-    sce.execute();
-    System.out.println("'whoami' output:\n" + sce.getOutput());
-    System.out.println("===HDFS-6694 Debug Data END");
+
+    // Only collect debug data on these OSes.
+    if (Shell.LINUX || Shell.SOLARIS || Shell.MAC) {
+      System.out.println("HDFS-6694 Debug Data BEGIN===");
+
+      String[] scmd = new String[] {"/bin/sh", "-c", "ulimit -a"};
+      ShellCommandExecutor sce = new ShellCommandExecutor(scmd);
+      sce.execute();
+      System.out.println("'ulimit -a' output:\n" + sce.getOutput());
+
+      scmd = new String[] {"hostname"};
+      sce = new ShellCommandExecutor(scmd);
+      sce.execute();
+      System.out.println("'hostname' output:\n" + sce.getOutput());
+
+      scmd = new String[] {"ifconfig", "-a"};
+      sce = new ShellCommandExecutor(scmd);
+      sce.execute();
+      System.out.println("'ifconfig' output:\n" + sce.getOutput());
+
+      System.out.println("===HDFS-6694 Debug Data END");
+    }
 
     HAStressTestHarness harness = new HAStressTestHarness();
 
     // Disable permissions so that another user can recover the lease.