hadoop git commit: HDFS-8581. ContentSummary on / skips further counts on yielding lock (contributed by J.Andreina)

2016-06-14 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.6 a2d960b6d -> 867b29be9


HDFS-8581. ContentSummary on / skips further counts on yielding lock 
(contributed by J.Andreina)

(cherry picked from commit 4014ce5990bff9b0ecb3d38a633d40eaf6cf07a7)
(cherry picked from commit 8854cdd9eefd05c10d0518528a3bff6a7348f37e)

Conflicts:
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/867b29be
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/867b29be
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/867b29be

Branch: refs/heads/branch-2.6
Commit: 867b29be9c5a2f01bfef3b4c361e5ed85d438660
Parents: a2d960b
Author: Kihwal Lee 
Authored: Tue Jun 14 08:31:00 2016 -0500
Committer: Kihwal Lee 
Committed: Tue Jun 14 08:31:00 2016 -0500

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hdfs/server/namenode/INodeDirectory.java|  2 +-
 .../java/org/apache/hadoop/hdfs/TestQuota.java  | 31 
 3 files changed, 35 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/867b29be/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 84bdbf8..cc27d77 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -27,6 +27,9 @@ Release 2.6.5 - UNRELEASED
 HDFS-10271. Extra bytes are getting released from reservedSpace for append
 (Brahma Reddy Battula via vinayakumarb)
 
+HDFS-8581. ContentSummary on / skips further counts on yielding lock
+(J.Andreina via vinayakumarb)
+
 Release 2.6.4 - 2016-02-11
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/867b29be/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index a753230..44e8f6f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -638,7 +638,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
         continue;
       }
       // The locks were released and reacquired. Check parent first.
-      if (getParent() == null) {
+      if (!isRoot() && getParent() == null) {
         // Stop further counting and return whatever we have so far.
         break;
       }
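
For context on the one-line change above: while computing a ContentSummary the NameNode periodically yields and reacquires the namesystem lock, and after reacquiring it checks whether the directory being counted still has a parent (i.e. was not deleted in the meantime). The root directory never has a parent, so that check always fired for "/" and the count stopped after the first yield; the fix exempts the root. A minimal, self-contained sketch of that check follows -- this is not the actual INodeDirectory code; Node, markDeleted() and shouldStopCounting() are illustrative stand-ins:

// Sketch of the parent check fixed by HDFS-8581 (illustrative names only).
class Node {
  private Node parent;
  private final boolean root;

  Node(Node parent, boolean root) {
    this.parent = parent;
    this.root = root;
  }

  Node getParent() { return parent; }
  boolean isRoot() { return root; }

  // Deleting an inode detaches it from its parent.
  void markDeleted() { parent = null; }

  // Asked after the lock was released and reacquired during a
  // ContentSummary traversal: should counting of this directory stop?
  boolean shouldStopCounting() {
    // Old check: getParent() == null. That is always true for "/", so a
    // summary on root stopped after the first yield. The fix excludes root.
    return !isRoot() && getParent() == null;
  }

  public static void main(String[] args) {
    Node root = new Node(null, true);
    Node dir = new Node(root, false);
    dir.markDeleted();                              // simulate deletion during the yield
    System.out.println(root.shouldStopCounting());  // false: keep counting "/"
    System.out.println(dir.shouldStopCounting());   // true: directory is gone, bail out
  }
}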

http://git-wip-us.apache.org/repos/asf/hadoop/blob/867b29be/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
index 6e93a91..6fab668 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
@@ -21,11 +21,13 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
 import java.io.OutputStream;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -921,4 +923,33 @@ public class TestQuota {
   cluster.shutdown();
 }
   }
+
+   /**
+   * File count on root , should return total value of files in Filesystem
+   * when one folder contains files more than "dfs.content-summary.limit".
+   */
+  @Test
+  public void testHugeFileCount() throws IOException {
+MiniDFSCluster cluster = null;
+Configuration conf = new Configuration();
+conf.setInt("dfs.content-summary.limit", 4);
+try {
+  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
+  DistributedFileSystem dfs = cluster.getFileSystem();
+  for (int i = 1; i <= 5; i++) {
+FSDataOutputStream out =
+dfs.create(new Path("/Folder1/" + "file" + i),(short)1);
+out.close();
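
The test above (cut off in this message but shown in the later copies of the same patch) drives ContentSummary on the root past the yield limit. A rough, self-contained sketch of that scenario follows; the class name ContentSummaryOnRootSketch and the standalone main() are made up for illustration, while the config key, paths and the expected count of 6 come from the patch itself:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class ContentSummaryOnRootSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Force the summary computation to yield the lock every 4 items so the
    // parent re-check in INodeDirectory is actually exercised.
    conf.setInt("dfs.content-summary.limit", 4);
    MiniDFSCluster cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
    try {
      DistributedFileSystem dfs = cluster.getFileSystem();
      // One folder holding more files than the limit, plus one file elsewhere.
      for (int i = 1; i <= 5; i++) {
        FSDataOutputStream out =
            dfs.create(new Path("/Folder1/file" + i), (short) 1);
        out.close();
      }
      dfs.create(new Path("/Folder2/file6"), (short) 1).close();
      // With the fix, counting "/" no longer stops at the first yield.
      ContentSummary summary = dfs.getContentSummary(new Path("/"));
      System.out.println("files under /: " + summary.getFileCount()); // expected: 6
    } finally {
      cluster.shutdown();
    }
  }
}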

hadoop git commit: HDFS-8581. ContentSummary on / skips further counts on yielding lock (contributed by J.Andreina)

2016-06-14 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 d44873a2a -> 8854cdd9e


HDFS-8581. ContentSummary on / skips further counts on yielding lock 
(contributed by J.Andreina)

(cherry picked from commit 4014ce5990bff9b0ecb3d38a633d40eaf6cf07a7)

Conflicts:
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8854cdd9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8854cdd9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8854cdd9

Branch: refs/heads/branch-2.7
Commit: 8854cdd9eefd05c10d0518528a3bff6a7348f37e
Parents: d44873a
Author: Kihwal Lee 
Authored: Tue Jun 14 08:25:09 2016 -0500
Committer: Kihwal Lee 
Committed: Tue Jun 14 08:25:09 2016 -0500

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  4 ++-
 .../hdfs/server/namenode/INodeDirectory.java|  2 +-
 .../java/org/apache/hadoop/hdfs/TestQuota.java  | 31 
 3 files changed, 35 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8854cdd9/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 26bb1e5..df338df 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -122,7 +122,6 @@ Release 2.7.3 - UNRELEASED
 HDFS-9779 . TestReplicationPolicyWithNodeGroup NODE variable picks wrong 
rack value
 (Kuhu Shukla via umamahesh)
 
-
 HDFS-9784. Example usage is not correct in Transparent Encryption document.
 (Takashi Ohnishi via aajisaka)
 
@@ -184,6 +183,9 @@ Release 2.7.3 - UNRELEASED
 HDFS-8548. Minicluster throws NPE on shutdown.
 (surendra singh lilhore via xyao)
 
+HDFS-8581. ContentSummary on / skips further counts on yielding lock
+(J.Andreina via vinayakumarb)
+
 Release 2.7.2 - 2016-01-25
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8854cdd9/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 9f55fc4..b32445a 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -662,7 +662,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
         continue;
       }
       // The locks were released and reacquired. Check parent first.
-      if (getParent() == null) {
+      if (!isRoot() && getParent() == null) {
         // Stop further counting and return whatever we have so far.
         break;
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8854cdd9/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
index 6e93a91..6fab668 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
@@ -21,11 +21,13 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
 import java.io.OutputStream;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -921,4 +923,33 @@ public class TestQuota {
   cluster.shutdown();
 }
   }
+
+   /**
+   * File count on root , should return total value of files in Filesystem
+   * when one folder contains files more than "dfs.content-summary.limit".
+   */
+  @Test
+  public void testHugeFileCount() throws IOException {
+MiniDFSCluster cluster = null;
+Configuration conf = new Configuration();
+conf.setInt("dfs.content-summary.limit", 4);
+try {
+  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();

[09/50] [abbrv] hadoop git commit: HDFS-8581. ContentSummary on / skips further counts on yielding lock (contributed by J.Andreina)

2015-09-15 Thread eclark
HDFS-8581. ContentSummary on / skips further counts on yielding lock 
(contributed by J.Andreina)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4014ce59
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4014ce59
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4014ce59

Branch: refs/heads/HADOOP-11890
Commit: 4014ce5990bff9b0ecb3d38a633d40eaf6cf07a7
Parents: 0f0e897
Author: Vinayakumar B 
Authored: Thu Sep 10 00:08:19 2015 +0530
Committer: Vinayakumar B 
Committed: Thu Sep 10 00:08:19 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hdfs/server/namenode/INodeDirectory.java|  2 +-
 .../java/org/apache/hadoop/hdfs/TestQuota.java  | 32 
 3 files changed, 36 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8edc389..bbb6066 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1326,6 +1326,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8939. Test(S)WebHdfsFileContextMainOperations failing on branch-2.
 (Chris Nauroth via jghoman)
 
+HDFS-8581. ContentSummary on / skips further counts on yielding lock
+(J.Andreina via vinayakumarb)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 5c33c02..21fe313 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -660,7 +660,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
         continue;
       }
       // The locks were released and reacquired. Check parent first.
-      if (getParent() == null) {
+      if (!isRoot() && getParent() == null) {
         // Stop further counting and return whatever we have so far.
         break;
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
index e339049..00ff07f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
@@ -24,12 +24,14 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
@@ -1007,4 +1009,34 @@ public class TestQuota {
 assertTrue(errOutput.contains(StorageType.getTypesSupportingQuota()
 .toString()));
   }
+
+   /**
+   * File count on root , should return total value of files in Filesystem
+   * when one folder contains files more than "dfs.content-summary.limit".
+   */
+  @Test
+  public void testHugeFileCount() throws IOException {
+MiniDFSCluster cluster = null;
+Configuration conf = new Configuration();
+conf.setInt("dfs.content-summary.limit", 4);
+try {
+  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
+  DistributedFileSystem dfs = cluster.getFileSystem();
+  for (int i = 1; i <= 5; i++) {
+FSDataOutputStream out =
+dfs.create(new Path("/Folder1/" + "file" + i),(short)1);
+out.close();
+  }
+  FSDataOutputStream out = dfs.create(new Path("/Folder2/file6"),(short)1);
+  out.close();
+  ContentSummary contentSummary = dfs.getContentSummary(new Path("/"));
+  assertEquals(6, contentSummary.getFileCount());

[22/42] hadoop git commit: HDFS-8581. ContentSummary on / skips further counts on yielding lock (contributed by J.Andreina)

2015-09-11 Thread wangda
HDFS-8581. ContentSummary on / skips further counts on yielding lock 
(contributed by J.Andreina)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4014ce59
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4014ce59
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4014ce59

Branch: refs/heads/YARN-1197
Commit: 4014ce5990bff9b0ecb3d38a633d40eaf6cf07a7
Parents: 0f0e897
Author: Vinayakumar B 
Authored: Thu Sep 10 00:08:19 2015 +0530
Committer: Vinayakumar B 
Committed: Thu Sep 10 00:08:19 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hdfs/server/namenode/INodeDirectory.java|  2 +-
 .../java/org/apache/hadoop/hdfs/TestQuota.java  | 32 
 3 files changed, 36 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8edc389..bbb6066 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1326,6 +1326,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8939. Test(S)WebHdfsFileContextMainOperations failing on branch-2.
 (Chris Nauroth via jghoman)
 
+HDFS-8581. ContentSummary on / skips further counts on yielding lock
+(J.Andreina via vinayakumarb)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 5c33c02..21fe313 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -660,7 +660,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
         continue;
       }
       // The locks were released and reacquired. Check parent first.
-      if (getParent() == null) {
+      if (!isRoot() && getParent() == null) {
         // Stop further counting and return whatever we have so far.
         break;
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
index e339049..00ff07f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
@@ -24,12 +24,14 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
@@ -1007,4 +1009,34 @@ public class TestQuota {
 assertTrue(errOutput.contains(StorageType.getTypesSupportingQuota()
 .toString()));
   }
+
+   /**
+   * File count on root , should return total value of files in Filesystem
+   * when one folder contains files more than "dfs.content-summary.limit".
+   */
+  @Test
+  public void testHugeFileCount() throws IOException {
+MiniDFSCluster cluster = null;
+Configuration conf = new Configuration();
+conf.setInt("dfs.content-summary.limit", 4);
+try {
+  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
+  DistributedFileSystem dfs = cluster.getFileSystem();
+  for (int i = 1; i <= 5; i++) {
+FSDataOutputStream out =
+dfs.create(new Path("/Folder1/" + "file" + i),(short)1);
+out.close();
+  }
+  FSDataOutputStream out = dfs.create(new Path("/Folder2/file6"),(short)1);
+  out.close();
+  ContentSummary contentSummary = dfs.getContentSummary(new Path("/"));
+  assertEquals(6, contentSummary.getFileCount());

[2/2] hadoop git commit: HDFS-8581. ContentSummary on / skips further counts on yielding lock (contributed by J.Andreina)

2015-09-09 Thread vinayakumarb
HDFS-8581. ContentSummary on / skips further counts on yielding lock 
(contributed by J.Andreina)

(cherry picked from commit 4014ce5990bff9b0ecb3d38a633d40eaf6cf07a7)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cd256c1f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cd256c1f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cd256c1f

Branch: refs/heads/branch-2
Commit: cd256c1fda100e71fedbf9cdbe0b69f948b79170
Parents: 03f50de
Author: Vinayakumar B 
Authored: Thu Sep 10 00:08:19 2015 +0530
Committer: Vinayakumar B 
Committed: Thu Sep 10 00:09:27 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hdfs/server/namenode/INodeDirectory.java|  2 +-
 .../java/org/apache/hadoop/hdfs/TestQuota.java  | 32 
 3 files changed, 36 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cd256c1f/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e573a3d..f5e9ea1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -979,6 +979,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8939. Test(S)WebHdfsFileContextMainOperations failing on branch-2.
 (Chris Nauroth via jghoman)
 
+HDFS-8581. ContentSummary on / skips further counts on yielding lock
+(J.Andreina via vinayakumarb)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cd256c1f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 5c33c02..21fe313 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -660,7 +660,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
         continue;
       }
       // The locks were released and reacquired. Check parent first.
-      if (getParent() == null) {
+      if (!isRoot() && getParent() == null) {
         // Stop further counting and return whatever we have so far.
         break;
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cd256c1f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
index f41893b..e68e4d6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
@@ -23,12 +23,14 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
@@ -943,4 +945,34 @@ public class TestQuota {
 assertTrue(errOutput.contains(StorageType.getTypesSupportingQuota()
 .toString()));
   }
+
+   /**
+   * File count on root , should return total value of files in Filesystem
+   * when one folder contains files more than "dfs.content-summary.limit".
+   */
+  @Test
+  public void testHugeFileCount() throws IOException {
+MiniDFSCluster cluster = null;
+Configuration conf = new Configuration();
+conf.setInt("dfs.content-summary.limit", 4);
+try {
+  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
+  DistributedFileSystem dfs = cluster.getFileSystem();
+  for (int i = 1; i <= 5; i++) {
+FSDataOutputStream out =
+dfs.create(new Path("/Folder1/" + "file" + i),(short)1);
+out.close();
+  }
+  FSDataOutputStream out = dfs.create(new Path("/Folder2/file6"),(short)1);
+  out.close();
+  ContentSummary contentSummary = dfs.getContentSummary(new Path("/"));

[1/2] hadoop git commit: HDFS-8581. ContentSummary on / skips further counts on yielding lock (contributed by J.Andreina)

2015-09-09 Thread vinayakumarb
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 03f50de48 -> cd256c1fd
  refs/heads/trunk 0f0e897bf -> 4014ce599


HDFS-8581. ContentSummary on / skips further counts on yielding lock 
(contributed by J.Andreina)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4014ce59
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4014ce59
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4014ce59

Branch: refs/heads/trunk
Commit: 4014ce5990bff9b0ecb3d38a633d40eaf6cf07a7
Parents: 0f0e897
Author: Vinayakumar B 
Authored: Thu Sep 10 00:08:19 2015 +0530
Committer: Vinayakumar B 
Committed: Thu Sep 10 00:08:19 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hdfs/server/namenode/INodeDirectory.java|  2 +-
 .../java/org/apache/hadoop/hdfs/TestQuota.java  | 32 
 3 files changed, 36 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8edc389..bbb6066 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1326,6 +1326,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8939. Test(S)WebHdfsFileContextMainOperations failing on branch-2.
 (Chris Nauroth via jghoman)
 
+HDFS-8581. ContentSummary on / skips further counts on yielding lock
+(J.Andreina via vinayakumarb)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 5c33c02..21fe313 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -660,7 +660,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
         continue;
       }
       // The locks were released and reacquired. Check parent first.
-      if (getParent() == null) {
+      if (!isRoot() && getParent() == null) {
         // Stop further counting and return whatever we have so far.
         break;
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4014ce59/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
index e339049..00ff07f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
@@ -24,12 +24,14 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
@@ -1007,4 +1009,34 @@ public class TestQuota {
 assertTrue(errOutput.contains(StorageType.getTypesSupportingQuota()
 .toString()));
   }
+
+   /**
+   * File count on root , should return total value of files in Filesystem
+   * when one folder contains files more than "dfs.content-summary.limit".
+   */
+  @Test
+  public void testHugeFileCount() throws IOException {
+MiniDFSCluster cluster = null;
+Configuration conf = new Configuration();
+conf.setInt("dfs.content-summary.limit", 4);
+try {
+  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
+  DistributedFileSystem dfs = cluster.getFileSystem();
+  for (int i = 1; i <= 5; i++) {
+FSDataOutputStream out =
+dfs.create(new Path("/Folder1/" + "file" + i),(short)1);
+out.close();
+  }
+  FSDataOutputStream out = dfs.create(new Path("/Folder2/file6"),(short)1);
+  out.close();
+