hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-22 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/trunk 5f11288e4 -> bcc8e76ba


HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bcc8e76b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bcc8e76b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bcc8e76b

Branch: refs/heads/trunk
Commit: bcc8e76badc1341a6cf995c8e44fa5e422158de8
Parents: 5f11288
Author: Kihwal Lee 
Authored: Tue May 22 11:19:15 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:20:51 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 102 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bcc8e76b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index a4fa8fd..595aeed 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bcc8e76b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bd8e1dd..6b6c6c8 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.file.Files;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.util.DiskChecker.FileIoProvider;
 import org.junit.After;
@@ -214,105 +213,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
   }
-
-  /**
-   * Verify DiskChecker ignores at least 2 transient file creation errors.

hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-22 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-3.1 9788c8c01 -> 6b8a5af6e


HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.

(cherry picked from commit bcc8e76badc1341a6cf995c8e44fa5e422158de8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6b8a5af6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6b8a5af6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6b8a5af6

Branch: refs/heads/branch-3.1
Commit: 6b8a5af6efe61ec619241187fd8c9869de0969ce
Parents: 9788c8c
Author: Kihwal Lee 
Authored: Tue May 22 11:22:22 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:22:22 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 102 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6b8a5af6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index a4fa8fd..595aeed 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6b8a5af6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bd8e1dd..6b6c6c8 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.file.Files;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.util.DiskChecker.FileIoProvider;
 import org.junit.After;
@@ -214,105 +213,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
  }

hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-22 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-3.0 a5a9c8cf0 -> 96313e03c


HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.

(cherry picked from commit bcc8e76badc1341a6cf995c8e44fa5e422158de8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/96313e03
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/96313e03
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/96313e03

Branch: refs/heads/branch-3.0
Commit: 96313e03c120907bbf35891579c52f127600acde
Parents: a5a9c8c
Author: Kihwal Lee 
Authored: Tue May 22 11:23:55 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:23:55 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 102 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/96313e03/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index a4fa8fd..595aeed 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/96313e03/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bd8e1dd..6b6c6c8 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.file.Files;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.util.DiskChecker.FileIoProvider;
 import org.junit.After;
@@ -214,105 +213,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
  }

hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-22 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.9 a4b12f890 -> a10caac94


HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.

(cherry picked from commit bcc8e76badc1341a6cf995c8e44fa5e422158de8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a10caac9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a10caac9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a10caac9

Branch: refs/heads/branch-2.9
Commit: a10caac945828d141559e5f8b15317f1b286516b
Parents: a4b12f8
Author: Kihwal Lee 
Authored: Tue May 22 11:24:59 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:24:59 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 102 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a10caac9/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index 8563232..c47de21 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a10caac9/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bd8e1dd..6b6c6c8 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.file.Files;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.util.DiskChecker.FileIoProvider;
 import org.junit.After;
@@ -214,105 +213,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
  }

hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-22 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 43beb -> 5dcd57cbe


HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.

(cherry picked from commit bcc8e76badc1341a6cf995c8e44fa5e422158de8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5dcd57cb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5dcd57cb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5dcd57cb

Branch: refs/heads/branch-2
Commit: 5dcd57cbeada2915678edbea5c5f0cc20486ddd9
Parents: 43b
Author: Kihwal Lee 
Authored: Tue May 22 11:29:54 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:29:54 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 102 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dcd57cb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index 8563232..c47de21 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dcd57cb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bd8e1dd..6b6c6c8 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.file.Files;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.util.DiskChecker.FileIoProvider;
 import org.junit.After;
@@ -214,105 +213,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
   }
-

hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-22 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 8e1107d63 -> 1c407d327


HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.

(cherry picked from commit bcc8e76badc1341a6cf995c8e44fa5e422158de8)

Conflicts:

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1c407d32
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1c407d32
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1c407d32

Branch: refs/heads/branch-2.8
Commit: 1c407d327a655576d2d0d12fd408db5ba0e85de8
Parents: 8e1107d
Author: Kihwal Lee 
Authored: Tue May 22 11:36:25 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:36:25 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 101 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 103 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1c407d32/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index 8563232..c47de21 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1c407d32/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index 43bd183..ffeee4d 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -203,105 +203,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
   }
-
-  /**
-   * Verify DiskChecker ignores at least 2 transient file creation errors.
-   */
-  @Test(timeout = 3)
- 

[39/50] [abbrv] hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-22 Thread arp
HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bcc8e76b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bcc8e76b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bcc8e76b

Branch: refs/heads/HDDS-48
Commit: bcc8e76badc1341a6cf995c8e44fa5e422158de8
Parents: 5f11288
Author: Kihwal Lee 
Authored: Tue May 22 11:19:15 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:20:51 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 102 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bcc8e76b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index a4fa8fd..595aeed 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bcc8e76b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bd8e1dd..6b6c6c8 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.file.Files;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.util.DiskChecker.FileIoProvider;
 import org.junit.After;
@@ -214,105 +213,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
   }
-
-  /**
-   * Verify DiskChecker ignores at least 2 transient file creation errors.
-   */
-  @Test(timeout = 3)
-  public void testDiskIoIgnoresTransie

[07/50] [abbrv] hadoop git commit: HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full situation. Contributed by Arpit Agarwal.

2018-05-23 Thread xyao
HADOOP-15450. Avoid fsync storm triggered by DiskChecker and handle disk full 
situation. Contributed by Arpit Agarwal.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bcc8e76b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bcc8e76b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bcc8e76b

Branch: refs/heads/HDDS-4
Commit: bcc8e76badc1341a6cf995c8e44fa5e422158de8
Parents: 5f11288
Author: Kihwal Lee 
Authored: Tue May 22 11:19:15 2018 -0500
Committer: Kihwal Lee 
Committed: Tue May 22 11:20:51 2018 -0500

--
 .../org/apache/hadoop/util/DiskChecker.java |  46 -
 .../org/apache/hadoop/util/TestDiskChecker.java | 102 ---
 .../hadoop/util/TestDiskCheckerWithDiskIo.java  | 173 +++
 3 files changed, 217 insertions(+), 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bcc8e76b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index a4fa8fd..595aeed 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -74,12 +74,30 @@ public class DiskChecker {
* @throws DiskErrorException
*/
   public static void checkDir(File dir) throws DiskErrorException {
+checkDirInternal(dir);
+  }
+
+  /**
+   * Create the directory if it doesn't exist and check that dir is
+   * readable, writable and executable. Perform some disk IO to
+   * ensure that the disk is usable for writes.
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDirWithDiskIo(File dir)
+  throws DiskErrorException {
+checkDirInternal(dir);
+doDiskIo(dir);
+  }
+
+  private static void checkDirInternal(File dir)
+  throws DiskErrorException {
 if (!mkdirsWithExistsCheck(dir)) {
   throw new DiskErrorException("Cannot create directory: "
+ dir.toString());
 }
 checkAccessByFileMethods(dir);
-doDiskIo(dir);
   }
 
   /**
@@ -94,10 +112,34 @@ public class DiskChecker {
*/
   public static void checkDir(LocalFileSystem localFS, Path dir,
   FsPermission expected)
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+  }
+
+
+  /**
+   * Create the local directory if necessary, also ensure permissions
+   * allow it to be read from and written into. Perform some diskIO
+   * to ensure that the disk is usable for writes. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */  
+  public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
+FsPermission expected) 
+  throws DiskErrorException, IOException {
+checkDirInternal(localFS, dir, expected);
+doDiskIo(localFS.pathToFile(dir));
+  }  
+
+  private static void checkDirInternal(LocalFileSystem localFS, Path dir,
+   FsPermission expected)
   throws DiskErrorException, IOException {
 mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
 checkAccessByFileMethods(localFS.pathToFile(dir));
-doDiskIo(localFS.pathToFile(dir));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bcc8e76b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bd8e1dd..6b6c6c8 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.file.Files;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.util.DiskChecker.FileIoProvider;
 import org.junit.After;
@@ -214,105 +213,4 @@ public class TestDiskChecker {
 }
 localDir.delete();
   }
-
-  /**
-   * Verify DiskChecker ignores at least 2 transient file creation errors.
-   */
-  @Test(timeout = 3)
-  public void testDiskIoIgnoresTransien