Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 29c3ee974 -> 9b51420ea


HDFS-10448. CacheManager#addInternal tracks bytesNeeded incorrectly when 
dealing with replication factors other than 1 (Yiqun Lin via cmccabe)

(cherry picked from commit 46f1602e896273b308fbd5df6c75f6c142828227)
(cherry picked from commit 4e11f33ccc849355eaeb0694dbee702e34340a74)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9b51420e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9b51420e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9b51420e

Branch: refs/heads/branch-2.8
Commit: 9b51420ea51856b8028f0097fc1787656b0d37d8
Parents: 29c3ee9
Author: Colin Patrick Mccabe <cmcc...@cloudera.com>
Authored: Mon Jun 20 18:25:09 2016 -0700
Committer: Colin Patrick Mccabe <cmcc...@cloudera.com>
Committed: Mon Jun 20 18:32:00 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/server/namenode/CacheManager.java    | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9b51420e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
index b1f936b..2b3f3c9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
@@ -396,8 +396,7 @@ public final class CacheManager {
     if (pool.getLimit() == CachePoolInfo.LIMIT_UNLIMITED) {
       return;
     }
-    if (pool.getBytesNeeded() + (stats.getBytesNeeded() * replication) > pool
-        .getLimit()) {
+    if (pool.getBytesNeeded() + stats.getBytesNeeded() > pool.getLimit()) {
       throw new InvalidRequestException("Caching path " + path + " of size "
           + stats.getBytesNeeded() / replication + " bytes at replication "
           + replication + " would exceed pool " + pool.getPoolName()
@@ -441,7 +440,7 @@ public final class CacheManager {
       }
     }
     return new CacheDirectiveStats.Builder()
-        .setBytesNeeded(requestedBytes)
+        .setBytesNeeded(requestedBytes * replication)
         .setFilesCached(requestedFiles)
         .build();
   }
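
For context, here is a minimal standalone sketch of the accounting this patch establishes: computeNeeded() now returns bytes already multiplied by the cache replication factor, so the pool limit check adds that figure directly instead of multiplying by replication a second time (the old double count). The numbers and variable names below are illustrative only, not the real CacheManager fields.

    // Sketch only: a 128 MB path cached at replication 3 into a 512 MB pool.
    public class CacheAccountingSketch {
      public static void main(String[] args) {
        long fileBytes = 128L * 1024 * 1024;  // on-disk bytes for the path
        short replication = 3;                // cache replication factor

        // After HDFS-10448, the "bytes needed" figure already includes
        // replication, so every consumer sees the same number.
        long bytesNeeded = fileBytes * replication;   // 384 MB

        long poolLimit = 512L * 1024 * 1024;  // pool limit: 512 MB
        long poolBytesNeeded = 0;             // bytes already charged to the pool

        // The limit check compares the pre-multiplied value directly;
        // it no longer multiplies by replication again.
        if (poolBytesNeeded + bytesNeeded > poolLimit) {
          throw new IllegalStateException("would exceed pool limit");
        }
        poolBytesNeeded += bytesNeeded;       // 384 MB charged, 128 MB headroom
        System.out.println("charged " + bytesNeeded + " bytes to the pool");
      }
    }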

