svn commit: r1378120 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/io/WritableComparator.java src/test/java/org/apache/hadoop/io/serializer

2012-08-28 Thread suresh
Author: suresh
Date: Tue Aug 28 13:05:31 2012
New Revision: 1378120

URL: http://svn.apache.org/viewvc?rev=1378120&view=rev
Log:
HADOOP-8619. WritableComparator must implement no-arg constructor. Contributed 
by Chris Douglas.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1378120&r1=1378119&r2=1378120&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Aug 
28 13:05:31 2012
@@ -98,6 +98,9 @@ Trunk (unreleased changes)
 HADOOP-8719. Workaround for kerberos-related log errors upon running any
 hadoop command on OSX. (Jianbin Wei via harsh)
 
+HADOOP-8619. WritableComparator must implement no-arg constructor.
+(Chris Douglas via Suresh)
+
   BUG FIXES
 
 HADOOP-8177. MBeans shouldn't try to register when it fails to create 
MBeanName.

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1378120&r1=1378119&r2=1378120&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 Tue Aug 28 13:05:31 2012
@@ -87,6 +87,10 @@ public class WritableComparator implemen
   private final WritableComparable key2;
   private final DataInputBuffer buffer;
 
+  protected WritableComparator() {
+    this(null);
+  }
+
   /** Construct for a {@link WritableComparable} implementation. */
   protected WritableComparator(Class<? extends WritableComparable> keyClass) {
     this(keyClass, false);
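Why the no-arg constructor matters: java.io.ObjectInputStream rebuilds a
Serializable object by invoking the no-arg constructor of its nearest
non-Serializable superclass. WritableComparator is not Serializable, so a
Serializable subclass (such as the TestWC helper used by the new test below)
can only be deserialized if an accessible no-arg constructor like this one
exists. A minimal, self-contained demonstration; Base, SubComparator, and
NoArgCtorDemo are illustrative names, not from the patch:

    import java.io.*;

    class Base {                          // not Serializable, like WritableComparator
      protected Base() { this(null); }    // the constructor this patch adds
      protected Base(Class<?> keyClass) { }
    }

    class SubComparator extends Base implements Serializable {
      private static final long serialVersionUID = 1L;
    }

    public class NoArgCtorDemo {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
          out.writeObject(new SubComparator());
        }
        try (ObjectInputStream in = new ObjectInputStream(
            new ByteArrayInputStream(bytes.toByteArray()))) {
          // Without Base's no-arg constructor this throws
          // java.io.InvalidClassException: no valid constructor.
          System.out.println(in.readObject().getClass().getSimpleName());
        }
      }
    }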

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java?rev=1378120&r1=1378119&r2=1378120&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
 Tue Aug 28 13:05:31 2012
@@ -18,25 +18,34 @@
 
 package org.apache.hadoop.io.serializer;
 
+import java.io.Serializable;
+
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.DataOutputBuffer;
 import static org.apache.hadoop.io.TestGenericWritable.CONF_TEST_KEY;
 import static org.apache.hadoop.io.TestGenericWritable.CONF_TEST_VALUE;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.TestGenericWritable.Baz;
 import org.apache.hadoop.io.TestGenericWritable.FooGenericWritable;
+import org.apache.hadoop.io.WritableComparator;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestWritableSerialization extends TestCase {
+public class TestWritableSerialization {
 
   private static final Configuration conf = new Configuration();
 
+  @Test
   public void testWritableSerialization() throws Exception {
 Text before = new Text("test writable"); 
 Text after = SerializationTestUtil.testSerialization(conf, before);
 assertEquals(before, after);
   }
   
+  @Test
   public void testWritableConfigurable() throws Exception {
 
 //set the configuration parameter
@@ -52,4 +61,42 @@ public class TestWritableSerialization e
 assertEquals(baz, result);
 assertNotNull(result.getConf());
   }
+
+  @Test
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public void testWritableComparatorJavaSerialization() throws Exception {
+Serialization ser = new JavaSerialization();
+
+Serializer serializer = ser.getSerializer(TestWC.class);
+DataOutputBuffer dob = new DataOutputBuffer();
+serializer.open(dob);
+TestWC orig = new TestWC(0);
+serializer.serialize(orig);
+serializer.close();
+
+Deserializ
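The archived diff is cut off above. For orientation only, here is a plausible
shape of the remaining half of the round trip, using the
org.apache.hadoop.io.serializer API whose Deserializer open/deserialize/close
calls mirror the Serializer half; the committed assertions may differ from
this sketch:

    // Hedged reconstruction, not the committed text:
    Deserializer deserializer = ser.getDeserializer(TestWC.class);
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), 0, dob.getLength());
    deserializer.open(dib);
    TestWC deser = (TestWC) deserializer.deserialize(null);
    deserializer.close();
    assertEquals(orig, deser);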

svn commit: r1378127 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/io/WritableComparator.java src/test/java/org/apache/hadoop/i

2012-08-28 Thread suresh
Author: suresh
Date: Tue Aug 28 13:31:10 2012
New Revision: 1378127

URL: http://svn.apache.org/viewvc?rev=1378127&view=rev
Log:
HADOOP-8619. Merging change from trunk to branch-2

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1378127&r1=1378126&r2=1378127&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Aug 28 13:31:10 2012
@@ -3,7 +3,6 @@ Hadoop Change Log
 Release 2.0.1-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES
-
 HADOOP-8388. Remove unused BlockLocation serialization.
 (Colin Patrick McCabe via eli)
 
@@ -684,6 +683,9 @@ Release 0.23.3 - UNRELEASED
 
 HADOOP-8525. Provide Improved Traceability for Configuration (bobby)
 
+HADOOP-8619. WritableComparator must implement no-arg constructor.
+(Chris Douglas via Suresh)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1378127&r1=1378126&r2=1378127&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 Tue Aug 28 13:31:10 2012
@@ -87,6 +87,10 @@ public class WritableComparator implemen
   private final WritableComparable key2;
   private final DataInputBuffer buffer;
 
+  protected WritableComparator() {
+    this(null);
+  }
+
   /** Construct for a {@link WritableComparable} implementation. */
   protected WritableComparator(Class<? extends WritableComparable> keyClass) {
     this(keyClass, false);

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java?rev=1378127&r1=1378126&r2=1378127&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
 Tue Aug 28 13:31:10 2012
@@ -18,25 +18,34 @@
 
 package org.apache.hadoop.io.serializer;
 
+import java.io.Serializable;
+
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.DataOutputBuffer;
 import static org.apache.hadoop.io.TestGenericWritable.CONF_TEST_KEY;
 import static org.apache.hadoop.io.TestGenericWritable.CONF_TEST_VALUE;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.TestGenericWritable.Baz;
 import org.apache.hadoop.io.TestGenericWritable.FooGenericWritable;
+import org.apache.hadoop.io.WritableComparator;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestWritableSerialization extends TestCase {
+public class TestWritableSerialization {
 
   private static final Configuration conf = new Configuration();
 
+  @Test
   public void testWritableSerialization() throws Exception {
 Text before = new Text("test writable"); 
 Text after = SerializationTestUtil.testSerialization(conf, before);
 assertEquals(before, after);
   }
   
+  @Test
   public void testWritableConfigurable() throws Exception {
 
 //set the configuration parameter
@@ -52,4 +61,42 @@ public class TestWritableSerialization e
 assertEquals(baz, result);
 assertNotNull(result.getConf());
   }
+
+  @Test
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public void testWritableComparatorJavaSerialization() throws Exception {
+Serialization ser = new JavaSerialization();
+
+Serializer serializer = ser.getSeria

svn commit: r1378129 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/io/WritableComparator.java src/test/java/org/apache/hadoo

2012-08-28 Thread suresh
Author: suresh
Date: Tue Aug 28 13:35:59 2012
New Revision: 1378129

URL: http://svn.apache.org/viewvc?rev=1378129&view=rev
Log:
HADOOP-8619. Merging change from trunk to branch-0.23

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1378129&r1=1378128&r2=1378129&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Tue Aug 28 13:35:59 2012
@@ -39,6 +39,9 @@ Release 0.23.3 - UNRELEASED
 HADOOP-8239. Add subclasses of MD5MD5CRC32FileChecksum to support file
 checksum with CRC32C.  (Kihwal Lee via szetszwo)
 
+HADOOP-8619. WritableComparator must implement no-arg constructor.
+(Chris Douglas via Suresh)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1378129&r1=1378128&r2=1378129&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 Tue Aug 28 13:35:59 2012
@@ -87,6 +87,10 @@ public class WritableComparator implemen
   private final WritableComparable key2;
   private final DataInputBuffer buffer;
 
+  protected WritableComparator() {
+    this(null);
+  }
+
   /** Construct for a {@link WritableComparable} implementation. */
   protected WritableComparator(Class<? extends WritableComparable> keyClass) {
     this(keyClass, false);

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java?rev=1378129&r1=1378128&r2=1378129&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java
 Tue Aug 28 13:35:59 2012
@@ -18,25 +18,34 @@
 
 package org.apache.hadoop.io.serializer;
 
+import java.io.Serializable;
+
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.DataOutputBuffer;
 import static org.apache.hadoop.io.TestGenericWritable.CONF_TEST_KEY;
 import static org.apache.hadoop.io.TestGenericWritable.CONF_TEST_VALUE;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.TestGenericWritable.Baz;
 import org.apache.hadoop.io.TestGenericWritable.FooGenericWritable;
+import org.apache.hadoop.io.WritableComparator;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestWritableSerialization extends TestCase {
+public class TestWritableSerialization {
 
   private static final Configuration conf = new Configuration();
 
+  @Test
   public void testWritableSerialization() throws Exception {
 Text before = new Text("test writable"); 
 Text after = SerializationTestUtil.testSerialization(conf, before);
 assertEquals(before, after);
   }
   
+  @Test
   public void testWritableConfigurable() throws Exception {
 
 //set the configuration parameter
@@ -52,4 +61,42 @@ public class TestWritableSerialization e
 assertEquals(baz, result);
 assertNotNull(result.getConf());
   }
+
+  @Test
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public void testWritableComparatorJavaSerialization() throws Exception {
+Serialization ser = new JavaSerialization();
+
+Serializer serializer = ser.getSerializer(TestWC.class);
+DataOutputBuffer dob = new DataOutputBuffer();
+serializer.open(dob)

svn commit: r1378175 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2012-08-28 Thread tucu
Author: tucu
Date: Tue Aug 28 15:30:43 2012
New Revision: 1378175

URL: http://svn.apache.org/viewvc?rev=1378175&view=rev
Log:
HADOOP-8738. junit JAR is showing up in the distro (tucu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1378175&r1=1378174&r2=1378175&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Aug 
28 15:30:43 2012
@@ -445,6 +445,8 @@ Branch-2 ( Unreleased changes )
 HADOOP-8031. Configuration class fails to find embedded .jar resources; 
 should use URL.openStream() (genman via tucu)
 
+HADOOP-8738. junit JAR is showing up in the distro (tucu)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
 HADOOP-8220. ZKFailoverController doesn't handle failure to become active




svn commit: r1378177 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2012-08-28 Thread tucu
Author: tucu
Date: Tue Aug 28 15:32:28 2012
New Revision: 1378177

URL: http://svn.apache.org/viewvc?rev=1378177&view=rev
Log:
HADOOP-8738. junit JAR is showing up in the distro (tucu)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1378177&r1=1378176&r2=1378177&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Aug 28 15:32:28 2012
@@ -239,6 +239,8 @@ Release 2.0.1-alpha - UNRELEASED
 HADOOP-8031. Configuration class fails to find embedded .jar resources; 
 should use URL.openStream() (genman via tucu)
 
+HADOOP-8738. junit JAR is showing up in the distro (tucu)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
 HADOOP-8220. ZKFailoverController doesn't handle failure to become active




svn commit: r1378241 - in /hadoop/common/branches/branch-1/src: hdfs/org/apache/hadoop/hdfs/server/namenode/ test/org/apache/hadoop/hdfs/server/namenode/

2012-08-28 Thread suresh
Author: suresh
Date: Tue Aug 28 17:26:59 2012
New Revision: 1378241

URL: http://svn.apache.org/viewvc?rev=1378241&view=rev
Log:
HDFS-3791. Namenode will not block until a large directory deletion completes. 
It allows other operations when the deletion is in progress. Backport of 
HDFS-173. Contributed by Uma Maheswara Rao.

Added:

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/server/namenode/TestLargeDirectoryDelete.java
Modified:

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeFile.java

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java?rev=1378241&r1=1378240&r2=1378241&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java
 Tue Aug 28 17:26:59 2012
@@ -58,6 +58,10 @@ class BlocksMap {
 INodeFile getINode() {
   return inode;
 }
+
+void setINode(INodeFile inode) {
+  this.inode = inode;
+}
 
 DatanodeDescriptor getDatanode(int index) {
   assert this.triplets != null : "BlockInfo is not initialized";

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java?rev=1378241&r1=1378240&r2=1378241&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
 Tue Aug 28 17:26:59 2012
@@ -591,19 +591,27 @@ class FSDirectory implements FSConstants
   }
 
   /**
-   * Remove the file from management, return blocks
+   * Delete the target directory and collect the blocks under it
+   * 
+   * @param src
+   *  Path of a directory to delete
+   * @param collectedBlocks
+   *  Blocks under the deleted directory
+   * @return true on successful deletion; else false
*/
-  boolean delete(String src) {
+  boolean delete(String src, List<Block> collectedBlocks) {
 if (NameNode.stateChangeLog.isDebugEnabled()) {
-  NameNode.stateChangeLog.debug("DIR* FSDirectory.delete: "+src);
+  NameNode.stateChangeLog.debug("DIR* FSDirectory.delete: " + src);
 }
 waitForReady();
 long now = FSNamesystem.now();
-int filesRemoved = unprotectedDelete(src, now);
+int filesRemoved = unprotectedDelete(src, collectedBlocks, now);
 if (filesRemoved <= 0) {
   return false;
 }
 incrDeletedFileCount(filesRemoved);
+// Blocks will be deleted later by the caller of this method
+FSNamesystem.getFSNamesystem().removePathAndBlocks(src, null);
 fsImage.getEditLog().logDelete(src, now);
 return true;
   }
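The point of threading collectedBlocks through delete(): the namespace unlink
and the edit-log record still happen under the namesystem lock, but the caller
can afterwards release the collected blocks in bounded batches, so deleting a
very large directory no longer holds the lock for the whole operation. A
self-contained sketch of that collect-then-release pattern; the lock object,
batch size, and helper methods are illustrative assumptions, not the branch-1
implementation:

    import java.util.ArrayList;
    import java.util.List;

    class LargeDeleteSketch {
      static final int BLOCK_DELETION_INCREMENT = 1000; // assumed batch size
      static final Object lock = new Object(); // stands in for the FSNamesystem lock

      interface Block { }  // stand-in for the HDFS Block type

      static void delete(String src) {
        List<Block> collected = new ArrayList<Block>();
        synchronized (lock) {
          unlinkAndCollect(src, collected); // fast: unlink the path, gather its blocks
        }
        for (int i = 0; i < collected.size(); i += BLOCK_DELETION_INCREMENT) {
          int end = Math.min(i + BLOCK_DELETION_INCREMENT, collected.size());
          synchronized (lock) {             // short critical section per batch
            releaseBlocks(collected.subList(i, end));
          }
          // other namenode operations can acquire the lock between batches
        }
      }

      static void unlinkAndCollect(String src, List<Block> out) { /* elided */ }
      static void releaseBlocks(List<Block> blocks) { /* elided */ }
    }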
@@ -625,14 +633,36 @@ class FSDirectory implements FSConstants
   }
   
   /**
-   * Delete a path from the name space
-   * Update the count at each ancestor directory with quota
-   * @param src a string representation of a path to an inode
-   * @param modificationTime the time the inode is removed
-   * @param deletedBlocks the place holder for the blocks to be removed
+   * Delete a path from the name space Update the count at each ancestor
+   * directory with quota
+   * 
+   * @param src
+   *  a string representation of a path to an inode
+   * 
+   * @param mTime
+   *  the time the inode is removed
+   */
+  void unprotectedDelete(String src, long mTime) {
+    List<Block> collectedBlocks = new ArrayList<Block>();
+int filesRemoved = unprotectedDelete(src, collectedBlocks, mTime);
+if (filesRemoved > 0) {
+  namesystem.removePathAndBlocks(src, collectedBlocks);
+}
+  }
+  
+  /**
+   * Delete a path from the name space Update the count at each ancestor
+   * directory with quota
+   * 
+   * @param src
+   *  a string representation of a path to an inode
+   * @param collectedBlocks
+   *  blocks collected from the deleted path
+   * @param mtime
+   *  the time the inode is removed
* @return the number of inodes deleted; 0 if no inodes are deleted.
-   */ 
-  int unprotectedDelete(String src, long modificationTime) {
+   */
+  int unprotectedDelete(String src, List collectedBl

svn commit: r1378357 - in /hadoop/common/branches/branch-1: CHANGES.txt src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/CapacityTaskScheduler.java

2012-08-28 Thread tgraves
Author: tgraves
Date: Tue Aug 28 21:28:39 2012
New Revision: 1378357

URL: http://svn.apache.org/viewvc?rev=1378357&view=rev
Log:
MAPREDUCE-1684. ClusterStatus can be cached in 
CapacityTaskScheduler.assignTasks() (Koji Noguchi via tgraves)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/CapacityTaskScheduler.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1378357&r1=1378356&r2=1378357&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Tue Aug 28 21:28:39 2012
@@ -213,6 +213,9 @@ Release 1.2.0 - unreleased
 MAPREDUCE-4595. TestLostTracker failing - possibly due to a race in 
 JobHistory.JobHistoryFilesManager#run() (kkambatl via tucu)
 
+MAPREDUCE-1684. ClusterStatus can be cached in 
+CapacityTaskScheduler.assignTasks() (Koji Noguchi via tgraves)
+
 Release 1.1.0 - unreleased
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-1/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/CapacityTaskScheduler.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/CapacityTaskScheduler.java?rev=1378357&r1=1378356&r2=1378357&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/CapacityTaskScheduler.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/CapacityTaskScheduler.java
 Tue Aug 28 21:28:39 2012
@@ -154,7 +154,8 @@ class CapacityTaskScheduler extends Task
 protected TaskType type = null;
 
 abstract TaskLookupResult obtainNewTask(TaskTrackerStatus taskTracker, 
-JobInProgress job, boolean assignOffSwitch) throws IOException;
+JobInProgress job, boolean assignOffSwitch,
+ClusterStatus clusterStatus) throws IOException;
 
 int getSlotsOccupied(JobInProgress job) {
   return (getNumReservedTaskTrackers(job) + getRunningTasks(job)) * 
@@ -293,7 +294,8 @@ class CapacityTaskScheduler extends Task
 private TaskLookupResult getTaskFromQueue(TaskTracker taskTracker,
   int availableSlots,
   CapacitySchedulerQueue queue,
-  boolean assignOffSwitch)
+  boolean assignOffSwitch,
+  ClusterStatus clusterStatus)
 throws IOException {
   TaskTrackerStatus taskTrackerStatus = taskTracker.getStatus();
   // we only look at jobs in the running queues, as these are the ones
@@ -320,7 +322,8 @@ class CapacityTaskScheduler extends Task
   availableSlots)) 
{
   // We found a suitable job. Get task from it.
   TaskLookupResult tlr = 
-obtainNewTask(taskTrackerStatus, j, assignOffSwitch);
+obtainNewTask(taskTrackerStatus, j, assignOffSwitch,
+  clusterStatus);
   //if there is a task return it immediately.
   if (tlr.getLookUpStatus() == 
   TaskLookupResult.LookUpStatus.LOCAL_TASK_FOUND || 
@@ -379,6 +382,11 @@ class CapacityTaskScheduler extends Task
 
   printQueues();
 
+  //MAPREDUCE-1684: somehow getClusterStatus seems to be expensive. Caching
+  //here to reuse during the scheduling
+  ClusterStatus clusterStatus =
+scheduler.taskTrackerManager.getClusterStatus();
+
   // Check if this tasktracker has been reserved for a job...
   JobInProgress job = taskTracker.getJobForFallowSlot(type);
   if (job != null) {
@@ -397,7 +405,7 @@ class CapacityTaskScheduler extends Task
 // Don't care about locality!
 job.overrideSchedulingOpportunities();
   }
-  return obtainNewTask(taskTrackerStatus, job, true);
+  return obtainNewTask(taskTrackerStatus, job, true, clusterStatus);
 } else {
   // Re-reserve the current tasktracker
   taskTracker.reserveSlots(type, job, availableSlots);
@@ -420,7 +428,8 @@ class CapacityTaskScheduler extends Task
 }
 
 TaskLookupResult tlr = 
-  getTaskFromQueue(taskTracker, availableSlots, queue, 
assignOffSwitch);
+  getTaskFromQueue(taskTracker, availableSlots, queue, assignOffSwitch,
+  clusterStatus);
 TaskLookupResult.LookUpStatus lookUpStatus = tlr.getLookUpStatus();
 
 if (lookUpStatus == TaskLookupResult.LookUpStatus.
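The essence of the fix: take the expensive ClusterStatus snapshot once per
assignTasks() pass and pass it down the call chain, rather than re-fetching it
on every queue and job probe. A minimal stand-alone illustration;
ClusterSnapshot, fetchSnapshot, and probeQueue are stand-ins, not the
scheduler's real API:

    import java.util.Arrays;
    import java.util.List;

    class SchedulingPassSketch {
      static class ClusterSnapshot { }  // stand-in for ClusterStatus

      ClusterSnapshot fetchSnapshot() { // stand-in for getClusterStatus(); assume costly
        return new ClusterSnapshot();
      }

      void assignTasks(List<String> queues) {
        ClusterSnapshot snapshot = fetchSnapshot(); // fetched exactly once per pass
        for (String queue : queues) {
          probeQueue(queue, snapshot);              // reused on every probe
        }
      }

      void probeQueue(String queue, ClusterSnapshot snapshot) { /* elided */ }

      public static void main(String[] args) {
        new SchedulingPassSketch().assignTasks(Arrays.asList("default", "research"));
      }
    }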

svn commit: r1378444 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/JNIFlags.cmake

2012-08-28 Thread eli
Author: eli
Date: Wed Aug 29 05:30:06 2012
New Revision: 1378444

URL: http://svn.apache.org/viewvc?rev=1378444&view=rev
Log:
HADOOP-8737. cmake: always use JAVA_HOME to find libjvm.so, jni.h, jni_md.h. 
Contributed by Colin Patrick McCabe

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/JNIFlags.cmake

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1378444&r1=1378443&r2=1378444&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Aug 
29 05:30:06 2012
@@ -447,6 +447,9 @@ Branch-2 ( Unreleased changes )
 
 HADOOP-8738. junit JAR is showing up in the distro (tucu)
 
+HADOOP-8737. cmake: always use JAVA_HOME to find libjvm.so, jni.h, 
jni_md.h.
+(Colin Patrick McCabe via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
 HADOOP-8220. ZKFailoverController doesn't handle failure to become active

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/JNIFlags.cmake
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/JNIFlags.cmake?rev=1378444&r1=1378443&r2=1378444&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/JNIFlags.cmake 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/JNIFlags.cmake 
Wed Aug 29 05:30:06 2012
@@ -65,4 +65,49 @@ if (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm
 endif (READELF MATCHES "NOTFOUND")
 endif (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm" AND CMAKE_SYSTEM_NAME STREQUAL 
"Linux")
 
-find_package(JNI REQUIRED)
+IF("${CMAKE_SYSTEM}" MATCHES "Linux")
+#
+# Locate JNI_INCLUDE_DIRS and JNI_LIBRARIES.
+# Since we were invoked from Maven, we know that the JAVA_HOME environment
+# variable is valid.  So we ignore system paths here and just use 
JAVA_HOME.
+#
+FILE(TO_CMAKE_PATH "$ENV{JAVA_HOME}" _JAVA_HOME)
+IF(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
+SET(_java_libarch "i386")
+ELSEIF (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR 
STREQUAL "amd64")
+SET(_java_libarch "amd64")
+ELSE()
+SET(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
+ENDIF()
+SET(_JDK_DIRS "${_JAVA_HOME}/jre/lib/${_java_libarch}/*"
+  "${_JAVA_HOME}/jre/lib/${_java_libarch}"
+  "${_JAVA_HOME}/jre/lib/*"
+  "${_JAVA_HOME}/jre/lib"
+  "${_JAVA_HOME}/lib/*"
+  "${_JAVA_HOME}/lib"
+  "${_JAVA_HOME}/include/*"
+  "${_JAVA_HOME}/include"
+  "${_JAVA_HOME}"
+)
+FIND_PATH(JAVA_INCLUDE_PATH
+NAMES jni.h 
+PATHS ${_JDK_DIRS}
+NO_DEFAULT_PATH)
+FIND_PATH(JAVA_INCLUDE_PATH2 
+NAMES jni_md.h
+PATHS ${_JDK_DIRS}
+NO_DEFAULT_PATH)
+SET(JNI_INCLUDE_DIRS ${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2})
+FIND_LIBRARY(JAVA_JVM_LIBRARY
+NAMES jvm JavaVM
+PATHS ${_JDK_DIRS}
+NO_DEFAULT_PATH)
+SET(JNI_LIBRARIES ${JAVA_JVM_LIBRARY})
+IF((NOT JAVA_JVM_LIBRARY) OR (NOT JAVA_INCLUDE_PATH) OR (NOT 
JAVA_INCLUDE_PATH2))
+MESSAGE("JAVA_HOME=${JAVA_HOME}, JAVA_JVM_LIBRARY=${JAVA_JVM_LIBRARY}")
+MESSAGE("JAVA_INCLUDE_PATH=${JAVA_INCLUDE_PATH}, 
JAVA_INCLUDE_PATH2=${JAVA_INCLUDE_PATH2}")
+MESSAGE(FATAL_ERROR "Failed to find a viable JVM installation under 
JAVA_HOME.")
+ENDIF()
+ELSE()
+find_package(JNI REQUIRED)
+ENDIF()
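What the new block does: on Linux, every JNI search path is derived from
$JAVA_HOME (mapping i?86 to i386 and x86_64/amd64 to amd64 for the
jre/lib/<arch> directory), and NO_DEFAULT_PATH keeps system-installed JVMs
from shadowing the one Maven selected; all other platforms keep using
find_package(JNI). The same search order restated in Java, purely as
illustration (simplified: the cmake also globs one level of subdirectories
under each entry):

    import java.io.File;
    import java.util.Arrays;
    import java.util.List;

    class JvmUnderJavaHomeSketch {
      public static void main(String[] args) {
        String home = System.getenv("JAVA_HOME"); // assumed valid, as in the cmake
        String arch = "amd64";                    // i?86 -> i386, x86_64/amd64 -> amd64
        List<String> dirs = Arrays.asList(
            home + "/jre/lib/" + arch, home + "/jre/lib",
            home + "/lib", home + "/include", home);
        for (String d : dirs) {
          File jvm = new File(d, "libjvm.so");
          if (jvm.isFile()) {
            System.out.println("found " + jvm);
            return;
          }
        }
        throw new IllegalStateException(
            "Failed to find a viable JVM installation under JAVA_HOME.");
      }
    }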




svn commit: r1378445 - in /hadoop/common/branches/branch-2/hadoop-common-project: ./ hadoop-auth/ hadoop-common/ hadoop-common/src/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-com

2012-08-28 Thread eli
Author: eli
Date: Wed Aug 29 05:31:50 2012
New Revision: 1378445

URL: http://svn.apache.org/viewvc?rev=1378445&view=rev
Log:
HADOOP-8737. cmake: always use JAVA_HOME to find libjvm.so, jni.h, jni_md.h. 
Contributed by Colin Patrick McCabe

Modified:
hadoop/common/branches/branch-2/hadoop-common-project/   (props changed)
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/   (props 
changed)
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/JNIFlags.cmake

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project:r1378444

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-auth:r1378444

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1378444

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1378445&r1=1378444&r2=1378445&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Aug 29 05:31:50 2012
@@ -241,6 +241,9 @@ Release 2.0.1-alpha - UNRELEASED
 
 HADOOP-8738. junit JAR is showing up in the distro (tucu)
 
+HADOOP-8737. cmake: always use JAVA_HOME to find libjvm.so, jni.h, 
jni_md.h.
+(Colin Patrick McCabe via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
 HADOOP-8220. ZKFailoverController doesn't handle failure to become active

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1378444

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/JNIFlags.cmake
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/JNIFlags.cmake?rev=1378445&r1=1378444&r2=1378445&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/JNIFlags.cmake
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/JNIFlags.cmake
 Wed Aug 29 05:31:50 2012
@@ -65,4 +65,49 @@ if (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm
 endif (READELF MATCHES "NOTFOUND")
 endif (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm" AND CMAKE_SYSTEM_NAME STREQUAL 
"Linux")
 
-find_package(JNI REQUIRED)
+IF("${CMAKE_SYSTEM}" MATCHES "Linux")
+#
+# Locate JNI_INCLUDE_DIRS and JNI_LIBRARIES.
+# Since we were invoked from Maven, we know that the JAVA_HOME environment
+# variable is valid.  So we ignore system paths here and just use 
JAVA_HOME.
+#
+FILE(TO_CMAKE_PATH "$ENV{JAVA_HOME}" _JAVA_HOME)
+IF(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
+SET(_java_libarch "i386")
+ELSEIF (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR 
STREQUAL "amd64")
+SET(_java_libarch "amd64")
+ELSE()
+SET(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
+ENDIF()
+SET(_JDK_DIRS "${_JAVA_HOME}/jre/lib/${_java_libarch}/*"
+  "${_JAVA_HOME}/jre/lib/${_java_libarch}"
+  "${_JAVA_HOME}/jre/lib/*"
+  "${_JAVA_HOME}/jre/lib"
+  "${_JAVA_HOME}/lib/*"
+  "${_JAVA_HOME}/lib"
+  "${_JAVA_HOME}/include/*"
+  "${_JAVA_HOME}/include"
+  "${_JAVA_HOME}"
+)
+FIND_PATH(JAVA_INCLUDE_PATH
+NAMES jni.h 
+PATHS ${_JDK_DIRS}
+NO_DEFAULT_PATH)
+FIND_PATH(JAVA_INCLUDE_PATH2 
+NAMES jni_md.h
+PATHS ${_JDK_DIRS}
+NO_DEFAULT_PATH)
+SET(JNI_INCLUDE_DIRS ${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2})
+FIND_LIBRARY