HADOOP-9427. Use JUnit assumptions to skip platform-specific tests. Contributed by Gergely Novák.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/54fe17a6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/54fe17a6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/54fe17a6

Branch: refs/heads/HADOOP-12756
Commit: 54fe17a607ecbf90145bfc5364b25f0a5aee98f4
Parents: 55d5993
Author: Akira Ajisaka <aajis...@apache.org>
Authored: Wed Jul 27 19:41:09 2016 +0900
Committer: Akira Ajisaka <aajis...@apache.org>
Committed: Wed Jul 27 19:41:09 2016 +0900

----------------------------------------------------------------------
 .../hadoop/fs/FileContextPermissionBase.java    | 16 ++----
 .../java/org/apache/hadoop/fs/TestFileUtil.java | 14 ++----
 .../org/apache/hadoop/fs/TestFsShellCopy.java   | 10 ++--
 .../apache/hadoop/fs/TestLocalDirAllocator.java | 20 ++++----
 .../apache/hadoop/fs/TestLocalFileSystem.java   |  8 +--
 .../fs/TestLocalFileSystemPermission.java       | 21 ++++----
 .../java/org/apache/hadoop/fs/TestPath.java     | 24 +++++----
 .../apache/hadoop/fs/TestSymlinkLocalFS.java    | 18 +++----
 .../fs/TestSymlinkLocalFSFileContext.java       |  5 +-
 .../hadoop/fs/TestSymlinkLocalFSFileSystem.java |  5 +-
 .../hadoop/fs/sftp/TestSFTPFileSystem.java      |  5 +-
 .../apache/hadoop/fs/shell/TestPathData.java    |  9 ++--
 .../apache/hadoop/io/nativeio/TestNativeIO.java | 51 ++++++--------------
 .../java/org/apache/hadoop/net/TestDNS.java     |  5 +-
 .../security/TestShellBasedIdMapping.java       | 11 ++---
 .../apache/hadoop/test/PlatformAssumptions.java | 47 ++++++++++++++++++
 .../org/apache/hadoop/util/TestWinUtils.java    |  4 +-
 .../org/apache/hadoop/fs/TestGlobPaths.java     |  4 +-
 .../org/apache/hadoop/hdfs/TestDFSUtil.java     |  5 +-
 .../hdfs/server/balancer/TestBalancer.java      |  4 +-
 .../TestRBWBlockInvalidation.java               |  5 +-
 .../datanode/TestDataNodeHotSwapVolumes.java    |  4 +-
 .../datanode/TestDataNodeVolumeFailure.java     |  6 +--
 .../TestDataNodeVolumeFailureReporting.java     |  4 +-
 .../TestDataNodeVolumeFailureToleration.java    | 10 ++--
 .../datanode/TestFsDatasetCacheRevocation.java  |  7 ++-
 .../fsdataset/impl/TestScrLazyPersistFiles.java |  5 +-
 .../hdfs/util/TestAtomicFileOutputStream.java   |  5 +-
 .../TestTracingShortCircuitLocalRead.java       |  5 +-
 .../apache/hadoop/mapred/TestYARNRunner.java    |  6 +--
 ...stNativeAzureFileSystemOperationsMocked.java |  4 +-
 .../azure/TestShellDecryptionKeyProvider.java   | 10 ++--
 .../yarn/util/TestWindowsBasedProcessTree.java  |  7 +--
 .../nodemanager/TestContainerExecutor.java      | 11 ++---
 .../TestLinuxContainerExecutorWithMocks.java    |  4 +-
 .../launcher/TestContainerLaunch.java           | 11 +++--
 36 files changed, 194 insertions(+), 196 deletions(-)
----------------------------------------------------------------------
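
The change applied across all of these files follows one pattern: tests that
previously detected the wrong platform and silently returned (and so were
reported as passing) now call a helper from the new PlatformAssumptions
class, which throws AssumptionViolatedException so JUnit reports the test as
skipped instead. A minimal sketch of the pattern, using a hypothetical test
class for illustration:

    import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;

    import org.junit.Test;

    public class ExamplePosixOnlyTest {
      @Test
      public void testPosixPermissions() {
        // Before this patch: if (Path.WINDOWS) { return; } -- counted as a pass.
        // Now: on Windows this throws AssumptionViolatedException, and the
        // runner marks the test as skipped rather than passed.
        assumeNotWindows();
        // ... POSIX-specific assertions ...
      }
    }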


http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
index 5c44a98..dff89f9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
@@ -34,6 +34,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
@@ -98,10 +99,7 @@ public abstract class FileContextPermissionBase {
 
   @Test
   public void testCreatePermission() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     String filename = "foo";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);
     fileContextTestHelper.createFile(fc, filename);
@@ -112,10 +110,7 @@ public abstract class FileContextPermissionBase {
   
   @Test
   public void testSetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
 
     String filename = "foo";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);
@@ -137,10 +132,7 @@ public abstract class FileContextPermissionBase {
 
   @Test
   public void testSetOwner() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
 
     String filename = "bar";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
index 2116a4b..0b7519c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
@@ -43,12 +43,12 @@ import java.util.zip.ZipOutputStream;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.tools.tar.TarEntry;
 import org.apache.tools.tar.TarOutputStream;
 
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -423,10 +423,8 @@ public class TestFileUtil {
 
   @Test (timeout = 30000)
   public void testFailFullyDelete() throws IOException {
-    if(Shell.WINDOWS) {
-      // windows Dir.setWritable(false) does not work for directories
-      return;
-    }
+    // Windows Dir.setWritable(false) does not work for directories
+    assumeNotWindows();
     LOG.info("Running test to verify failure of fullyDelete()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del));
@@ -504,10 +502,8 @@ public class TestFileUtil {
 
   @Test (timeout = 30000)
   public void testFailFullyDeleteContents() throws IOException {
-    if(Shell.WINDOWS) {
-      // windows Dir.setWritable(false) does not work for directories
-      return;
-    }
+    // Windows Dir.setWritable(false) does not work for directories
+    assumeNotWindows();
     LOG.info("Running test to verify failure of fullyDeleteContents()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
index 9e199ca..6ca3905 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -143,7 +143,7 @@ public class TestFsShellCopy {
 
   @Test
   public void testCopyFileFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
         .toString())).getAbsolutePath();
     Path testRoot = new Path(windowsTestRootPath, "testPutFile");
@@ -158,7 +158,7 @@ public class TestFsShellCopy {
 
   @Test
   public void testCopyDirFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
         .toString())).getAbsolutePath();
     Path testRoot = new Path(windowsTestRootPath, "testPutDir");
@@ -485,7 +485,7 @@ public class TestFsShellCopy {
   
   @Test
   public void testMoveFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     Path testRoot = new Path(testRootDir, "testPutFile");
     lfs.delete(testRoot, true);
     lfs.mkdirs(testRoot);
@@ -504,7 +504,7 @@ public class TestFsShellCopy {
 
   @Test
   public void testGetWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String winDstFile = (new File(dstPath.toUri().getPath()
         .toString())).getAbsolutePath();
     shellRun(0, "-get", srcPath.toString(), winDstFile);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
index 8cbe283..825efe0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
@@ -34,8 +34,8 @@ import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.*;
 
 /** This test LocalDirAllocator works correctly;
  * Every test case uses different buffer dirs to
@@ -57,8 +57,6 @@ public class TestLocalDirAllocator {
   final static private LocalDirAllocator dirAllocator =
     new LocalDirAllocator(CONTEXT);
   static LocalFileSystem localFs;
-  final static private boolean isWindows =
-    System.getProperty("os.name").startsWith("Windows");
   final static int SMALL_FILE_SIZE = 100;
   final static private String RELATIVE = "/RELATIVE";
   final static private String ABSOLUTE = "/ABSOLUTE";
@@ -132,7 +130,7 @@ public class TestLocalDirAllocator {
    */
   @Test (timeout = 30000)
   public void test0() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);
     try {
@@ -154,7 +152,7 @@ public class TestLocalDirAllocator {
    */
   @Test (timeout = 30000)
   public void testROBufferDirAndRWBufferDir() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir1 = buildBufferDir(ROOT, 1);
     String dir2 = buildBufferDir(ROOT, 2);
     try {
@@ -174,7 +172,7 @@ public class TestLocalDirAllocator {
    */
   @Test (timeout = 30000)
   public void testDirsNotExist() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir2 = buildBufferDir(ROOT, 2);
     String dir3 = buildBufferDir(ROOT, 3);
     try {
@@ -200,7 +198,7 @@ public class TestLocalDirAllocator {
    */
   @Test (timeout = 30000)
   public void testRWBufferDirBecomesRO() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir3 = buildBufferDir(ROOT, 3);
     String dir4 = buildBufferDir(ROOT, 4);
     try {
@@ -238,7 +236,7 @@ public class TestLocalDirAllocator {
   static final int TRIALS = 100;
   @Test (timeout = 30000)
   public void testCreateManyFiles() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir5 = buildBufferDir(ROOT, 5);
     String dir6 = buildBufferDir(ROOT, 6);
     try {
@@ -348,7 +346,7 @@ public class TestLocalDirAllocator {
    */
   @Test (timeout = 30000)
   public void testNoSideEffects() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -370,7 +368,7 @@ public class TestLocalDirAllocator {
    */
   @Test (timeout = 30000)
   public void testGetLocalPathToRead() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -395,7 +393,7 @@ public class TestLocalDirAllocator {
    */
   @Test (timeout = 30000)
   public void testGetAllLocalPathsToRead() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
index 3aadd2f..2311337 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
@@ -21,7 +21,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
@@ -31,8 +30,9 @@ import java.net.URI;
 import java.util.Arrays;
 import java.util.Random;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Mockito.*;
 
 import org.junit.After;
@@ -287,7 +287,7 @@ public class TestLocalFileSystem {
 
   @Test(timeout = 1000)
   public void testListStatusWithColons() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     File colonFile = new File(TEST_ROOT_DIR, "foo:bar");
     colonFile.mkdirs();
     FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR));
@@ -298,7 +298,7 @@ public class TestLocalFileSystem {
   
   @Test
   public void testListStatusReturnConsistentPathOnWindows() throws IOException {
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     String dirNoDriveSpec = TEST_ROOT_DIR;
     if (dirNoDriveSpec.charAt(1) == ':')
        dirNoDriveSpec = dirNoDriveSpec.substring(2);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java
index 11e94a7..817285c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java
@@ -26,7 +26,10 @@ import org.apache.hadoop.util.Shell;
 import java.io.*;
 import java.util.*;
 
-import junit.framework.*;
+import org.junit.Test;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,7 +40,7 @@ import static org.junit.Assert.assertThat;
 /**
  * This class tests the local file system via the FileSystem abstraction.
  */
-public class TestLocalFileSystemPermission extends TestCase {
+public class TestLocalFileSystemPermission {
 
   public static final Logger LOGGER =
       LoggerFactory.getLogger(TestFcLocalFsPermission.class);
@@ -71,11 +74,9 @@ public class TestLocalFileSystemPermission extends TestCase {
     assertTrue(!fs.exists(name));
   }
 
+  @Test
   public void testLocalFSDirsetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      LOGGER.info("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     LocalFileSystem localfs = FileSystem.getLocal(new Configuration());
     Configuration conf = localfs.getConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044");
@@ -124,11 +125,9 @@ public class TestLocalFileSystemPermission extends TestCase {
   }
 
   /** Test LocalFileSystem.setPermission */
+  @Test
   public void testLocalFSsetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      LOGGER.info("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044");
     LocalFileSystem localfs = FileSystem.getLocal(conf);
@@ -195,6 +194,7 @@ public class TestLocalFileSystemPermission extends TestCase {
   }
 
   /** Test LocalFileSystem.setOwner. */
+  @Test
   public void testLocalFSsetOwner() throws IOException {
     if (Path.WINDOWS) {
       LOGGER.info("Cannot run test for Windows");
@@ -248,6 +248,7 @@ public class TestLocalFileSystemPermission extends TestCase {
    * 5. For this directory we expect 715 as permission not 755
    * @throws Exception we can throw away all the exception.
    */
+  @Test
   public void testSetUmaskInRealTime() throws Exception {
     if (Path.WINDOWS) {
       LOGGER.info("Cannot run test for Windows");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
index e5b22f9..dc48a10 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
@@ -31,9 +31,17 @@ import org.apache.hadoop.util.Shell;
 
 import com.google.common.base.Joiner;
 
-import junit.framework.TestCase;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-public class TestPath extends TestCase {
+/**
+ * Test Hadoop Filesystem Paths.
+ */
+public class TestPath {
   /**
    * Merge a bunch of Path objects into a sorted semicolon-separated
    * path string.
@@ -242,9 +250,7 @@ public class TestPath extends TestCase {
   /** Test that Windows paths are correctly handled */
   @Test (timeout = 5000)
   public void testWindowsPaths() throws URISyntaxException, IOException {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
     assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
@@ -255,9 +261,7 @@ public class TestPath extends TestCase {
   /** Test invalid paths on Windows are correctly rejected */
   @Test (timeout = 5000)
   public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     String [] invalidPaths = {
         "hdfs:\\\\\\tmp"
@@ -401,7 +405,7 @@ public class TestPath extends TestCase {
   @Test (timeout = 30000)
   public void testGlobEscapeStatus() throws Exception {
     // This test is not meaningful on Windows where * is disallowed in file name.
-    if (Shell.WINDOWS) return;
+    assumeNotWindows();
     FileSystem lfs = FileSystem.getLocal(new Configuration());
     Path testRoot = lfs.makeQualified(
         new Path(GenericTestUtils.getTempPath("testPathGlob")));
@@ -493,7 +497,7 @@ public class TestPath extends TestCase {
 
   @Test (timeout = 30000)
   public void testIsWindowsAbsolutePath() {
-    if (!Shell.WINDOWS) return;
+    assumeWindows();
     assertTrue(Path.isWindowsAbsolutePath("C:\\test", false));
     assertTrue(Path.isWindowsAbsolutePath("C:/test", false));
     assertTrue(Path.isWindowsAbsolutePath("/C:/test", true));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java
index 8968e7a..0a51b65 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -29,7 +30,6 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Test;
@@ -71,37 +71,37 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
   @Override
   public void testCreateDanglingLink() throws IOException {
     // Dangling symlinks are not supported on Windows local file system.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testCreateDanglingLink();
   }
 
   @Override
   public void testCreateFileViaDanglingLinkParent() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testCreateFileViaDanglingLinkParent();
   }
 
   @Override
   public void testOpenResolvesLinks() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testOpenResolvesLinks();
   }
 
   @Override
   public void testRecursiveLinks() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testRecursiveLinks();
   }
 
   @Override
   public void testRenameDirToDanglingSymlink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testRenameDirToDanglingSymlink();
   }
 
   @Override  
   public void testStatDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testStatDanglingLink();
   }
 
@@ -126,7 +126,7 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
   @Test(timeout=1000)
   /** Stat and lstat a dangling link */
   public void testDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     Path fileAbs  = new Path(testBaseDir1()+"/file");
     Path fileQual = new Path(testURI().toString(), fileAbs);
     Path link     = new Path(testBaseDir1()+"/linkToFile");
@@ -235,7 +235,7 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
 
   @Override
   public void testSetTimesDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testSetTimesDanglingLink();
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java
index 7f506c6..301bf04 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java
@@ -17,12 +17,11 @@
  */
 package org.apache.hadoop.fs;
 
-import org.apache.hadoop.util.Shell;
 import org.junit.BeforeClass;
 
 import java.io.IOException;
 
-import static org.junit.Assume.assumeTrue;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 
 public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS {
 
@@ -34,7 +33,7 @@ public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS {
 
   @Override
   public void testRenameFileWithDestParentSymlink() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     super.testRenameFileWithDestParentSymlink();
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java
index a945ddd..6fc2d62 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java
@@ -22,14 +22,13 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Options.Rename;
-import org.apache.hadoop.util.Shell;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS {
 
@@ -64,7 +63,7 @@ public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS {
 
   @Override
   public void testRenameFileWithDestParentSymlink() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     super.testRenameFileWithDestParentSymlink();
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java
index 36aacee..8dc5324 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 
 import org.apache.sshd.SshServer;
 import org.apache.sshd.common.NamedFactory;
@@ -48,8 +47,8 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 public class TestSFTPFileSystem {
 
@@ -99,7 +98,7 @@ public class TestSFTPFileSystem {
   @BeforeClass
   public static void setUp() throws Exception {
     // skip all tests if running on Windows
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
 
     startSshdServer();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
index e3e574a..f2656e6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.shell;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -126,9 +127,7 @@ public class TestPathData {
 
   @Test (timeout = 5000)
   public void testToFileRawWindowsPaths() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     // Can we handle raw Windows paths? The files need not exist for
     // these tests to succeed.
@@ -155,9 +154,7 @@ public class TestPathData {
 
   @Test (timeout = 5000)
   public void testInvalidWindowsPath() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     // Verify that the following invalid paths are rejected.
     String [] winPaths = {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
index e6f25dc..b2445a3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
@@ -42,6 +42,8 @@ import org.junit.Test;
 
 import static org.junit.Assume.*;
 import static org.junit.Assert.*;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
@@ -107,9 +109,7 @@ public class TestNativeIO {
    */
   @Test (timeout = 30000)
   public void testMultiThreadedFstat() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     final FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
@@ -165,9 +165,7 @@ public class TestNativeIO {
 
   @Test (timeout = 30000)
   public void testSetFilePointer() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     LOG.info("Set a file pointer on Windows");
     try {
@@ -212,9 +210,7 @@ public class TestNativeIO {
 
   @Test (timeout = 30000)
   public void testCreateFile() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
     try {
@@ -255,9 +251,7 @@ public class TestNativeIO {
   /** Validate access checks on Windows */
   @Test (timeout = 30000)
   public void testAccess() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     File testFile = new File(TEST_DIR, "testfileaccess");
     assertTrue(testFile.createNewFile());
@@ -331,9 +325,7 @@ public class TestNativeIO {
 
   @Test (timeout = 30000)
   public void testOpenMissingWithoutCreate() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     LOG.info("Open a missing file without O_CREAT and it should fail");
     try {
@@ -348,9 +340,7 @@ public class TestNativeIO {
 
   @Test (timeout = 30000)
   public void testOpenWithCreate() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     LOG.info("Test creating a file with O_CREAT");
     FileDescriptor fd = NativeIO.POSIX.open(
@@ -382,9 +372,7 @@ public class TestNativeIO {
    */
   @Test (timeout = 30000)
   public void testFDDoesntLeak() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     for (int i = 0; i < 10000; i++) {
       FileDescriptor fd = NativeIO.POSIX.open(
@@ -403,9 +391,7 @@ public class TestNativeIO {
    */
   @Test (timeout = 30000)
   public void testChmod() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     try {
       NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
@@ -428,9 +414,7 @@ public class TestNativeIO {
 
   @Test (timeout = 30000)
   public void testPosixFadvise() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     FileInputStream fis = new FileInputStream("/dev/zero");
     try {
@@ -497,19 +481,13 @@ public class TestNativeIO {
 
   @Test (timeout = 30000)
   public void testGetUserName() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
-
+    assumeNotWindows();
     assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
   }
 
   @Test (timeout = 30000)
   public void testGetGroupName() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
-
+    assumeNotWindows();
     assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
   }
 
@@ -647,8 +625,7 @@ public class TestNativeIO {
 
   @Test (timeout=10000)
   public void testNativePosixConsts() {
-    assumeTrue("Native POSIX constants not required for Windows",
-      !Path.WINDOWS);
+    assumeNotWindows("Native POSIX constants not required for Windows");
     assertTrue("Native 0_RDONLY const not set", O_RDONLY >= 0);
     assertTrue("Native 0_WRONLY const not set", O_WRONLY >= 0);
     assertTrue("Native 0_RDWR const not set", O_RDWR >= 0);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
index a0bfe73..863d380 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
@@ -30,15 +30,14 @@ import javax.naming.NameNotFoundException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 /**
  * Test host name and IP resolution and caching.
@@ -197,7 +196,7 @@ public class TestDNS {
    */
   @Test (timeout=60000)
   public void testLookupWithHostsFallback() throws Exception {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     final String oldHostname = changeDnsCachedHostname(DUMMY_HOSTNAME);
 
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java
index e395247..d589c3a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.security;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -28,7 +28,6 @@ import java.io.OutputStream;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.security.ShellBasedIdMapping.PassThroughMap;
 import org.apache.hadoop.security.ShellBasedIdMapping.StaticMapping;
 import org.junit.Test;
@@ -87,7 +86,7 @@ public class TestShellBasedIdMapping {
   
   @Test
   public void testStaticMapping() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     Map<Integer, Integer> uidStaticMap = new PassThroughMap<Integer>();
     Map<Integer, Integer> gidStaticMap = new PassThroughMap<Integer>();
     
@@ -129,7 +128,7 @@ public class TestShellBasedIdMapping {
   // Test staticMap refreshing
   @Test
   public void testStaticMapUpdate() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     File tempStaticMapFile = File.createTempFile("nfs-", ".map");
     tempStaticMapFile.delete();
     Configuration conf = new Configuration();
@@ -207,7 +206,7 @@ public class TestShellBasedIdMapping {
 
   @Test
   public void testDuplicates() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     String GET_ALL_USERS_CMD = "echo \"root:x:0:0:root:/root:/bin/bash\n"
         + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n"
         + "hdfs:x:11502:10788:Grid Distributed File System:/home/hdfs:/bin/bash\n"
@@ -247,7 +246,7 @@ public class TestShellBasedIdMapping {
 
   @Test
   public void testIdOutOfIntegerRange() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     String GET_ALL_USERS_CMD = "echo \""
         + "nfsnobody:x:4294967294:4294967294:Anonymous NFS User:/var/lib/nfs:/sbin/nologin\n"
         + "nfsnobody1:x:4294967295:4294967295:Anonymous NFS User:/var/lib/nfs1:/sbin/nologin\n"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java
new file mode 100644
index 0000000..4e83162
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.junit.internal.AssumptionViolatedException;
+
+/**
+ * JUnit assumptions for the environment (OS).
+ */
+public final class PlatformAssumptions {
+  public static final String OS_NAME = System.getProperty("os.name");
+  public static final boolean WINDOWS = OS_NAME.startsWith("Windows");
+
+  private PlatformAssumptions() { }
+
+  public static void assumeNotWindows() {
+    assumeNotWindows("Expected Unix-like platform but got " + OS_NAME);
+  }
+
+  public static void assumeNotWindows(String message) {
+    if (WINDOWS) {
+      throw new AssumptionViolatedException(message);
+    }
+  }
+
+  public static void assumeWindows() {
+    if (!WINDOWS) {
+      throw new AssumptionViolatedException(
+          "Expected Windows platform but got " + OS_NAME);
+    }
+  }
+}
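
Both helpers construct org.junit.internal.AssumptionViolatedException
directly rather than delegating to org.junit.Assume.assumeTrue(), so the
default skip messages name the actual platform via the os.name property.
The one-argument overload lets callers supply their own reason, as the
TestNativeIO change above does:

    // Skip on Windows with an explicit reason:
    assumeNotWindows("Native POSIX constants not required for Windows");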

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
index cfa97f4..e45890c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.util;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 import static org.junit.matchers.JUnitMatchers.containsString;
 
 import java.io.File;
@@ -53,7 +53,7 @@ public class TestWinUtils {
   @Before
   public void setUp() throws IOException {
     // Not supported on non-Windows platforms
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     TEST_DIR.mkdirs();
     assertTrue("Failed to create Test directory " + TEST_DIR,
         TEST_DIR.isDirectory() );

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
index 30778e6..32d960a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
 
 import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.UUID;
 import java.util.regex.Pattern;
@@ -495,7 +495,7 @@ public class TestGlobPaths {
   public void pTestEscape() throws IOException {
     // Skip the test case on Windows because backslash will be treated as a
     // path separator instead of an escaping character on Windows.
-    org.junit.Assume.assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     try {
       String [] files = new String[] {USER_DIR+"/ab\\[c.d"};
       Path[] matchedPath = prepareTesting(USER_DIR+"/ab\\[c.d", files);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
index b139845..bef5e33 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
@@ -34,6 +34,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYPASSWORD_
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY;
 import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -73,9 +74,7 @@ import org.apache.hadoop.security.alias.CredentialProvider;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
-import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -800,7 +799,7 @@ public class TestDFSUtil {
   @Test (timeout=15000)
   public void testLocalhostReverseLookup() {
     // 127.0.0.1 -> localhost reverse resolution does not happen on Windows.
-    Assume.assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
 
     // Make sure when config FS_DEFAULT_NAME_KEY using IP address,
     // it will automatically convert it to hostname

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
index 788f1e5..73a4cbc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
@@ -43,10 +43,10 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIP
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -451,7 +451,7 @@ public class TestBalancer {
     // This test assumes stick-bit based block pin mechanism available only
     // in Linux/Unix. It can be unblocked on Windows when HDFS-7759 is ready to
     // provide a different mechanism for Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     final Configuration conf = new HdfsConfiguration();
     initConf(conf);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java
index 744e9fa..9816af8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java
@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -70,7 +69,7 @@ public class TestRBWBlockInvalidation {
       throws IOException, InterruptedException {
     // This test cannot pass on Windows due to file locking enforcement.  It will
     // reject the attempt to delete the block file from the RBW folder.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     Configuration conf = new HdfsConfiguration();
     conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
index c03b02b..0dbb09c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
@@ -72,6 +72,7 @@ import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.anyOf;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.not;
@@ -81,7 +82,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doAnswer;
@@ -784,7 +784,7 @@ public class TestDataNodeHotSwapVolumes {
       ReconfigurationException {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     startDFSCluster(1, 2);
     createFile(new Path("/test"), 32, (short)2);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java
index 00c2f62..6792ba8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -24,7 +25,6 @@ import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -219,7 +219,7 @@ public class TestDataNodeVolumeFailure {
       throws InterruptedException, IOException, TimeoutException {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     Path file1 = new Path("/test1");
     DFSTestUtil.createFile(fs, file1, 1024, (short) 2, 1L);
@@ -384,7 +384,7 @@ public class TestDataNodeVolumeFailure {
   public void testUnderReplicationAfterVolFailure() throws Exception {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     // Bring up one more datanode
     cluster.startDataNodes(conf, 1, true, null, null);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java
index c76fa2c..6c58743 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 
 import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -26,7 +27,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.util.ArrayList;
@@ -82,7 +82,7 @@ public class TestDataNodeVolumeFailureReporting {
   public void setUp() throws Exception {
     // These tests use DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     // Allow a single volume failure (there are two volumes)
     initCluster(1, 2, 1);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java
index 2f8239e..5ff7d9b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -91,7 +91,7 @@ public class TestDataNodeVolumeFailureToleration {
    */
   @Test
   public void testValidVolumesAtStartup() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
 
     // Make sure no DNs are running.
     cluster.shutdownDataNodes();
@@ -139,7 +139,7 @@ public class TestDataNodeVolumeFailureToleration {
    */
   @Test
   public void testConfigureMinValidVolumes() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
 
     // Bring up two additional datanodes that need both of their volumes
     // functioning in order to stay up.
@@ -218,7 +218,7 @@ public class TestDataNodeVolumeFailureToleration {
   private void testVolumeConfig(int volumesTolerated, int volumesFailed,
       boolean expectedBPServiceState, boolean manageDfsDirs)
       throws IOException, InterruptedException {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
     final int dnIndex = 0;
     // Fail the current directory since invalid storage directory perms
     // get fixed up automatically on datanode startup.
@@ -272,7 +272,7 @@ public class TestDataNodeVolumeFailureToleration {
    */
   @Test
   public void testFailedVolumeOnStartupIsCounted() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
     final DatanodeManager dm = cluster.getNamesystem().getBlockManager(
     ).getDatanodeManager();
     long origCapacity = DFSTestUtil.getLiveDatanodeCapacity(dm);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java
index ce37abd..40de320 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
@@ -96,7 +97,8 @@ public class TestFsDatasetCacheRevocation {
    */
   @Test(timeout=120000)
   public void testPinning() throws Exception {
-    assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
+    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     Configuration conf = getDefaultConf();
     // Set a really long revocation timeout, so that we won't reach it during
     // this test.
@@ -146,7 +148,8 @@ public class TestFsDatasetCacheRevocation {
    */
   @Test(timeout=120000)
   public void testRevocation() throws Exception {
-    assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
+    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     BlockReaderTestUtil.enableHdfsCachingTracing();
     BlockReaderTestUtil.enableShortCircuitShmTracing();
     Configuration conf = getDefaultConf();
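
In the two hunks above, the compound assumeTrue(NativeCodeLoader.isNativeCodeLoaded()
&& !Path.WINDOWS) is split into separate assumptions, so a skipped run can
report which precondition failed instead of one opaque boolean. A hypothetical
test showing the resulting pattern (the class name, test name, and assumption
message are illustrative additions, not part of this patch):

    // Hypothetical example of the split-assumption pattern.
    import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
    import static org.junit.Assume.assumeTrue;

    import org.apache.hadoop.util.NativeCodeLoader;
    import org.junit.Test;

    public class ExampleNativeUnixOnlyTest {
      @Test(timeout = 120000)
      public void testNeedsNativeCodeOnUnix() {
        assumeTrue("native hadoop library is not loaded",
            NativeCodeLoader.isNativeCodeLoaded()); // distinct skip reason 1
        assumeNotWindows();                         // distinct skip reason 2
        // ... body runs only on Unix-like hosts with the native library ...
      }
    }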

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java
index f598a07..7043227 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java
@@ -42,6 +42,7 @@ import java.util.concurrent.TimeoutException;
 
 import static org.apache.hadoop.fs.StorageType.DEFAULT;
 import static org.apache.hadoop.fs.StorageType.RAM_DISK;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertThat;
@@ -61,8 +62,8 @@ public class TestScrLazyPersistFiles extends LazyPersistTestCase {
 
   @Before
   public void before() {
-    Assume.assumeThat(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS,
-        equalTo(true));
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     Assume.assumeThat(DomainSocket.getLoadingFailureReason(), equalTo(null));
 
     final long osPageSize = NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java
index b9946c5..c4ae8ce 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java
@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.hdfs.util;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.PathUtils;
-import org.apache.hadoop.util.Shell;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -129,7 +128,7 @@ public class TestAtomicFileOutputStream {
 
   @Test
   public void testFailToRename() throws IOException {
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     OutputStream fos = null;
     try {
       fos = new AtomicFileOutputStream(DST_FILE);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
index b3cf402..03131f3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tracing;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
@@ -24,7 +25,6 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsTracer;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -63,7 +63,8 @@ public class TestTracingShortCircuitLocalRead {
 
   @Test
   public void testShortCircuitTraceHooks() throws IOException {
-    assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
+    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     conf = new Configuration();
     conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
             Tracer.SPAN_RECEIVER_CLASSES_KEY,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
index abf2e72..0e17ac8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.mapred;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -65,7 +66,6 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -586,9 +586,7 @@ public class TestYARNRunner {
     // the Windows behavior is different and this test currently doesn't really
     // apply
     // MAPREDUCE-6588 should revisit this test
-    if (Shell.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     final String ADMIN_LIB_PATH = "foo";
     final String USER_LIB_PATH = "bar";
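
The hunk above also removes a silent early return: with if (Shell.WINDOWS)
{ return; } the test was reported as passing on Windows even though nothing
ran, whereas an assumption failure marks it as skipped. The Windows-only
tests elsewhere in this patch receive the mirror-image assumeWindows()
treatment. A hypothetical before/after pair (class and method names are
illustrative only):

    // Hypothetical comparison of the two skip styles.
    import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;

    import org.apache.hadoop.util.Shell;
    import org.junit.Test;

    public class ExampleSkipStyles {
      @Test
      public void unixOnlyOldStyle() {
        if (Shell.WINDOWS) {
          return; // reported as a PASS although nothing was verified
        }
        // ... Unix-only assertions ...
      }

      @Test
      public void unixOnlyNewStyle() {
        assumeNotWindows(); // reported as SKIPPED in the JUnit results
        // ... Unix-only assertions ...
      }
    }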

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java
index f01829b..69c942d 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.azure;
 
-import static org.junit.Assume.assumeTrue;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 
 import org.apache.hadoop.fs.FSMainOperationsBaseTest;
 import org.apache.hadoop.fs.FileSystem;
@@ -48,7 +48,7 @@ public class TestNativeAzureFileSystemOperationsMocked extends
     System.out
         .println("Skipping testListStatusThrowsExceptionForUnreadableDir since WASB"
             + " doesn't honor directory permissions.");
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java
index 2284d1f..0bf33d8 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.azure;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 
 import java.io.File;
@@ -26,7 +27,6 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -38,9 +38,7 @@ public class TestShellDecryptionKeyProvider {
 
   @Test
   public void testScriptPathNotSpecified() throws Exception {
-    if (!Shell.WINDOWS) {
-      return;
-    }
+    assumeWindows();
     ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider();
     Configuration conf = new Configuration();
     String account = "testacct";
@@ -58,9 +56,7 @@ public class TestShellDecryptionKeyProvider {
 
   @Test
   public void testValidScript() throws Exception {
-    if (!Shell.WINDOWS) {
-      return;
-    }
+    assumeWindows();
     String expectedResult = "decretedKey";
 
     // Create a simple script which echoes the given key plus the given

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
index cbab1ed..5699702 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.yarn.util;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertTrue;
 
 public class TestWindowsBasedProcessTree {
@@ -45,10 +45,7 @@ public class TestWindowsBasedProcessTree {
   @Test (timeout = 30000)
   @SuppressWarnings("deprecation")
   public void tree() {
-    if( !Shell.WINDOWS) {
-      LOG.info("Platform not Windows. Not testing");
-      return;      
-    }
+    assumeWindows();
     assertTrue("WindowsBasedProcessTree should be available on Windows", 
                WindowsBasedProcessTree.isAvailable());
     ControlledClock testClock = new ControlledClock();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
index bc87b03..396c8f4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 import org.junit.Assert;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 @SuppressWarnings("deprecation")
 public class TestContainerExecutor {
@@ -80,8 +80,7 @@ public class TestContainerExecutor {
 
   @Test (timeout = 5000)
   public void testRunCommandWithNoResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     Configuration conf = new Configuration();
     String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
         conf, Resource.newInstance(1024, 1));
@@ -93,8 +92,7 @@ public class TestContainerExecutor {
 
   @Test (timeout = 5000)
   public void testRunCommandWithMemoryOnlyResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     Configuration conf = new Configuration();
     conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_MEMORY_LIMIT_ENABLED, "true");
     String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
@@ -107,8 +105,7 @@ public class TestContainerExecutor {
 
   @Test (timeout = 5000)
   public void testRunCommandWithCpuAndMemoryResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     int containerCores = 1;
     Configuration conf = new Configuration();
     conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_CPU_LIMIT_ENABLED, "true");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
index aae0037..ae5a011 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.yarn.server.nodemanager;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
@@ -123,7 +123,7 @@ public class TestLinuxContainerExecutorWithMocks {
 
   @Before
   public void setup() throws IOException, ContainerExecutionException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     tmpMockExecutor = System.getProperty("test.build.data") +
         "/tmp-mock-container-executor";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54fe17a6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index 6b32dd9..a06822a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
@@ -408,7 +409,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
   public void testPrependDistcache() throws Exception {
 
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     ContainerLaunchContext containerLaunchContext =
         recordFactory.newRecordInstance(ContainerLaunchContext.class);
@@ -1129,7 +1130,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
     String callCmd = "@call ";
     
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1177,7 +1178,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
   @Test (timeout = 10000)
   public void testWindowsShellScriptBuilderEnv() throws IOException {
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1202,7 +1203,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
     String mkDirCmd = "@if not exist \"\" mkdir \"\"";
 
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1225,7 +1226,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
   @Test (timeout = 10000)
   public void testWindowsShellScriptBuilderLink() throws IOException {
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     String linkCmd = "@" + Shell.getWinUtilsPath() + " symlink \"\" \"\"";
 
     // The tests are built on assuming 8191 max command line length

