Repository: hadoop
Updated Branches:
  refs/heads/branch-2 55f709790 -> 08508ca1c


HDFS-8792. BlockManager#postponedMisreplicatedBlocks should use a LightWeightHashSet to save memory (Yi Liu via Colin P. McCabe)

(cherry picked from commit c77bd6af16cbc26f88a2c6d8220db83a3e1caa2c)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/08508ca1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/08508ca1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/08508ca1

Branch: refs/heads/branch-2
Commit: 08508ca1c8d9263d83229841915c8659bfc72afc
Parents: 55f7097
Author: Colin Patrick Mccabe <cmcc...@cloudera.com>
Authored: Mon Aug 17 12:00:45 2015 -0700
Committer: Colin Patrick Mccabe <cmcc...@cloudera.com>
Committed: Mon Aug 17 12:07:16 2015 -0700

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 ++
 .../server/blockmanagement/BlockManager.java    |  5 ++--
 .../hadoop/hdfs/util/LightWeightHashSet.java    | 21 ++++++++++----
 .../hdfs/util/TestLightWeightHashSet.java       | 29 +++++++++++++++++++-
 4 files changed, 50 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
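For context, the change swaps the java.util.HashSet backing postponedMisreplicatedBlocks for Hadoop's LightWeightHashSet, which keeps roughly one LinkedElement node per entry instead of a HashMap node plus a dummy value reference, and adds Iterator.remove() support so existing code that prunes the set while scanning it keeps working. A minimal sketch of the pattern, assuming org.apache.hadoop.hdfs.protocol.Block as the element type; the class below and the stillMisreplicated predicate are illustrative placeholders, not the real BlockManager:

import java.util.Iterator;

import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.util.LightWeightHashSet;

// Illustrative sketch only -- mirrors the field change made by this commit and
// the remove-while-iterating pattern that the new LinkedSetIterator.remove()
// makes possible.
public class PostponedBlocksSketch {

  // Before: private final Set<Block> postponedMisreplicatedBlocks = Sets.newHashSet();
  private final LightWeightHashSet<Block> postponedMisreplicatedBlocks =
      new LightWeightHashSet<>();

  void postpone(Block b) {
    postponedMisreplicatedBlocks.add(b);
  }

  // Rescan-style loop: drop entries in place while iterating, which needs the
  // Iterator.remove() support added to LightWeightHashSet by this commit.
  void rescan() {
    for (Iterator<Block> it = postponedMisreplicatedBlocks.iterator(); it.hasNext();) {
      Block b = it.next();
      if (!stillMisreplicated(b)) {
        it.remove();  // resyncs the iterator's expectedModification counter
      }
    }
  }

  // Hypothetical predicate, standing in for the real re-check logic.
  private boolean stillMisreplicated(Block b) {
    return false;
  }
}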


http://git-wip-us.apache.org/repos/asf/hadoop/blob/08508ca1/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 243711a..32a2230 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -464,6 +464,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-7433. Optimize performance of DatanodeManager's node map.
     (daryn via kihwal)
 
+    HDFS-8792. BlockManager#postponedMisreplicatedBlocks should use a
+    LightWeightHashSet to save memory (Yi Liu via Colin P. McCabe)
+
   BUG FIXES
 
     HDFS-8091: ACLStatus and XAttributes should be presented to

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08508ca1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index a1109a4..254a628 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -84,6 +84,7 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage.State;
 import org.apache.hadoop.hdfs.server.protocol.KeyUpdateCommand;
 import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
+import org.apache.hadoop.hdfs.util.LightWeightHashSet;
 import org.apache.hadoop.hdfs.util.LightWeightLinkedSet;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.net.Node;
@@ -94,7 +95,6 @@ import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Sets;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -195,7 +195,8 @@ public class BlockManager implements BlockStatsMXBean {
    * notified of all block deletions that might have been pending
    * when the failover happened.
    */
-  private final Set<Block> postponedMisreplicatedBlocks = Sets.newHashSet();
+  private final LightWeightHashSet<Block> postponedMisreplicatedBlocks =
+      new LightWeightHashSet<>();
 
   /**
    * Maps a StorageID to the set of blocks that are "extra" for this

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08508ca1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightHashSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightHashSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightHashSet.java
index 87537dc..ffd390f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightHashSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightHashSet.java
@@ -528,12 +528,13 @@ public class LightWeightHashSet<T> implements Collection<T> {
   }
 
   private class LinkedSetIterator implements Iterator<T> {
-    /** The starting modification for fail-fast. */
-    private final int startModification = modification;
+    /** The current modification epoch. */
+    private int expectedModification = modification;
     /** The current index of the entry array. */
     private int index = -1;
     /** The next element to return. */
     private LinkedElement<T> next = nextNonemptyEntry();
+    private LinkedElement<T> current;
 
     private LinkedElement<T> nextNonemptyEntry() {
       for (index++; index < entries.length && entries[index] == null; index++);
@@ -547,13 +548,14 @@ public class LightWeightHashSet<T> implements Collection<T> {
 
     @Override
     public T next() {
-      if (modification != startModification) {
+      if (modification != expectedModification) {
         throw new ConcurrentModificationException("modification="
-            + modification + " != startModification = " + startModification);
+            + modification + " != expectedModification = " + expectedModification);
       }
       if (next == null) {
         throw new NoSuchElementException();
       }
+      current = next;
       final T e = next.element;
       // find the next element
       final LinkedElement<T> n = next.next;
@@ -563,7 +565,16 @@ public class LightWeightHashSet<T> implements Collection<T> {
 
     @Override
     public void remove() {
-      throw new UnsupportedOperationException("Remove is not supported.");
+      if (current == null) {
+        throw new NoSuchElementException();
+      }
+      if (modification != expectedModification) {
+        throw new ConcurrentModificationException("modification="
+            + modification + " != expectedModification = " + expectedModification);
+      }
+      LightWeightHashSet.this.removeElem(current.element);
+      current = null;
+      expectedModification = modification;
     }
   }
 

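A note on the iterator contract implemented by the hunk above, as a hedged reading of the diff: remove() is only legal after a successful next() (this patch signals the misuse with NoSuchElementException rather than the more conventional IllegalStateException), structural changes made outside the iterator still trip the fail-fast check, and a removal made through the iterator resynchronizes expectedModification so iteration can continue. A small sketch under those assumptions; the class name is illustrative only:

import java.util.ConcurrentModificationException;
import java.util.Iterator;

import org.apache.hadoop.hdfs.util.LightWeightHashSet;

public class IteratorContractSketch {
  public static void main(String[] args) {
    LightWeightHashSet<Integer> set = new LightWeightHashSet<>();
    for (int i = 0; i < 4; i++) {
      set.add(i);
    }

    // Removing through the iterator keeps iteration valid: remove() updates
    // expectedModification to the set's new modification count.
    Iterator<Integer> it = set.iterator();
    while (it.hasNext()) {
      it.next();
      it.remove();
    }
    System.out.println("empty after iterator removal: " + set.isEmpty());

    // Removing behind the iterator's back still fails fast on the next next().
    set.add(42);
    set.add(43);
    Iterator<Integer> stale = set.iterator();
    stale.next();
    set.remove(43);  // external structural change bumps the modification count
    try {
      stale.next();
    } catch (ConcurrentModificationException e) {
      System.out.println("fail-fast preserved: " + e.getMessage());
    }
  }
}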
http://git-wip-us.apache.org/repos/asf/hadoop/blob/08508ca1/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestLightWeightHashSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestLightWeightHashSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestLightWeightHashSet.java
index bb27483..50af255 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestLightWeightHashSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestLightWeightHashSet.java
@@ -191,6 +191,33 @@ public class TestLightWeightHashSet{
   }
 
   @Test
+  public void testRemoveAllViaIterator() {
+    LOG.info("Test remove all via iterator");
+    for (Integer i : list) {
+      assertTrue(set.add(i));
+    }
+    for (Iterator<Integer> iter = set.iterator(); iter.hasNext(); ) {
+      int e = iter.next();
+      // element should be there before removing
+      assertTrue(set.contains(e));
+      iter.remove();
+      // element should not be there now
+      assertFalse(set.contains(e));
+    }
+
+    // the deleted elements should not be there
+    for (int i = 0; i < NUM; i++) {
+      assertFalse(set.contains(list.get(i)));
+    }
+
+    // iterator should not have next
+    Iterator<Integer> iter = set.iterator();
+    assertFalse(iter.hasNext());
+    assertTrue(set.isEmpty());
+    LOG.info("Test remove all via iterator - DONE");
+  }
+
+  @Test
   public void testPollAll() {
     LOG.info("Test poll all");
     for (Integer i : list) {
@@ -470,4 +497,4 @@ public class TestLightWeightHashSet{
     }
   }
 
-}
\ No newline at end of file
+}
