svn commit: r1368002 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/shell/ src/test/java/org/apache/hadoop/fs/shell/

2012-08-01 Thread tgraves
Author: tgraves
Date: Wed Aug  1 14:02:22 2012
New Revision: 1368002

URL: http://svn.apache.org/viewvc?rev=1368002&view=rev
Log:
HADOOP-8633. Interrupted FsShell copies may leave tmp files (Daryn Sharp via 
tgraves)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1368002&r1=1368001&r2=1368002&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Aug 
 1 14:02:22 2012
@@ -873,6 +873,9 @@ Release 0.23.3 - UNRELEASED
 org.apache.hadoop.classification.InterfaceAudience not found  (Trevor
 Robinson via tgraves)
 
+HADOOP-8633. Interrupted FsShell copies may leave tmp files (Daryn Sharp
+via tgraves)
+
 Release 0.23.2 - UNRELEASED 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1368002&r1=1368001&r2=1368002&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
 Wed Aug  1 14:02:22 2012
@@ -24,6 +24,8 @@ import java.io.InputStream;
 import java.util.LinkedList;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
@@ -232,31 +234,65 @@ abstract class CommandWithDestination ex
if (target.exists && (target.stat.isDirectory() || !overwrite)) {
   throw new PathExistsException(target.toString());
 }
-target.fs.setWriteChecksum(writeChecksum);
-PathData tempFile = null;
+TargetFileSystem targetFs = new TargetFileSystem(target.fs);
 try {
-  tempFile = target.createTempFile(target+"._COPYING_");
-  FSDataOutputStream out = target.fs.create(tempFile.path, true);
-  IOUtils.copyBytes(in, out, getConf(), true);
+  PathData tempTarget = target.suffix("._COPYING_");
+  targetFs.setWriteChecksum(writeChecksum);
+  targetFs.writeStreamToFile(in, tempTarget);
+  targetFs.rename(tempTarget, target);
+} finally {
+  targetFs.close(); // last ditch effort to ensure temp file is removed
+}
+  }
+
+  // Helper filter filesystem that registers created files as temp files to
+  // be deleted on exit unless successfully renamed
+  private static class TargetFileSystem extends FilterFileSystem {
+TargetFileSystem(FileSystem fs) {
+  super(fs);
+}
+
+void writeStreamToFile(InputStream in, PathData target) throws IOException {
+  FSDataOutputStream out = null;
+  try {
+out = create(target);
+IOUtils.copyBytes(in, out, getConf(), true);
+  } finally {
+IOUtils.closeStream(out); // just in case copyBytes didn't
+  }
+}
+
+// tag created files as temp files
+FSDataOutputStream create(PathData item) throws IOException {
+  try {
+return create(item.path, true);
+  } finally { // might have been created but stream was interrupted
+deleteOnExit(item.path);
+  }
+}
+
+void rename(PathData src, PathData target) throws IOException {
   // the rename method with an option to delete the target is deprecated
-  if (target.exists && !target.fs.delete(target.path, false)) {
+  if (target.exists && !delete(target.path, false)) {
 // too bad we don't know why it failed
 PathIOException e = new PathIOException(target.toString());
 e.setOperation("delete");
 throw e;
   }
-  if (!tempFile.fs.rename(tempFile.path, target.path)) {
+  if (!rename(src.path, target.path)) {
 // too bad we don't know why it failed
-PathIOException e = new PathIOException(tempFile.toString());
+PathIOException e = new 
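
For reference, the pattern the patch introduces is: write to a "._COPYING_"
temp file that is registered for delete-on-exit, and rename it into place
only once the stream completes, so an interrupted copy leaves nothing
behind. A minimal standalone sketch of the same idea against the public
FileSystem API (the class and method names here are illustrative, not part
of the patch):

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    // Illustrative helper: stream to a temp file, then rename into place.
    public class TempFileCopy {
      public static void copyToFile(FileSystem fs, InputStream in,
          Path target, Configuration conf) throws IOException {
        Path tmp = new Path(target.getParent(),
            target.getName() + "._COPYING_");
        fs.deleteOnExit(tmp);  // cleaned up at fs.close() if never renamed
        FSDataOutputStream out = fs.create(tmp, true);
        try {
          IOUtils.copyBytes(in, out, conf, false);
        } finally {
          IOUtils.closeStream(out);  // close even if the copy was interrupted
        }
        if (!fs.rename(tmp, target)) {
          throw new IOException("rename failed: " + tmp + " -> " + target);
        }
        // after a successful rename the pending delete finds nothing to remove
      }
    }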

svn commit: r1368003 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/shell/ src/test/java/org/apache/hadoop/fs/shell/

2012-08-01 Thread tgraves
Author: tgraves
Date: Wed Aug  1 14:03:01 2012
New Revision: 1368003

URL: http://svn.apache.org/viewvc?rev=1368003&view=rev
Log:
merge -r  1368001:1368002 from trunk. FIXES: HADOOP-8633

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java
  - copied unchanged from r1368002, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java
Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1368003&r1=1368002&r2=1368003&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Aug  1 14:03:01 2012
@@ -692,6 +692,9 @@ Release 0.23.3 - UNRELEASED
 org.apache.hadoop.classification.InterfaceAudience not found  (Trevor
 Robinson via tgraves)
 
+HADOOP-8633. Interrupted FsShell copies may leave tmp files (Daryn Sharp
+via tgraves)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1368003&r1=1368002&r2=1368003&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
 Wed Aug  1 14:03:01 2012
@@ -24,6 +24,8 @@ import java.io.InputStream;
 import java.util.LinkedList;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
@@ -232,31 +234,65 @@ abstract class CommandWithDestination ex
if (target.exists && (target.stat.isDirectory() || !overwrite)) {
   throw new PathExistsException(target.toString());
 }
-target.fs.setWriteChecksum(writeChecksum);
-PathData tempFile = null;
+TargetFileSystem targetFs = new TargetFileSystem(target.fs);
 try {
-  tempFile = target.createTempFile(target+"._COPYING_");
-  FSDataOutputStream out = target.fs.create(tempFile.path, true);
-  IOUtils.copyBytes(in, out, getConf(), true);
+  PathData tempTarget = target.suffix("._COPYING_");
+  targetFs.setWriteChecksum(writeChecksum);
+  targetFs.writeStreamToFile(in, tempTarget);
+  targetFs.rename(tempTarget, target);
+} finally {
+  targetFs.close(); // last ditch effort to ensure temp file is removed
+}
+  }
+
+  // Helper filter filesystem that registers created files as temp files to
+  // be deleted on exit unless successfully renamed
+  private static class TargetFileSystem extends FilterFileSystem {
+TargetFileSystem(FileSystem fs) {
+  super(fs);
+}
+
+void writeStreamToFile(InputStream in, PathData target) throws IOException {
+  FSDataOutputStream out = null;
+  try {
+out = create(target);
+IOUtils.copyBytes(in, out, getConf(), true);
+  } finally {
+IOUtils.closeStream(out); // just in case copyBytes didn't
+  }
+}
+
+// tag created files as temp files
+FSDataOutputStream create(PathData item) throws IOException {
+  try {
+return create(item.path, true);
+  } finally { // might have been created but stream was interrupted
+deleteOnExit(item.path);
+  }
+}
+
+void rename(PathData src, PathData target) throws IOException {
   // the rename method with an option to delete the target is deprecated
-  if (target.exists && !target.fs.delete(target.path, false)) {
+  if (target.exists && !delete(target.path, false)) {
 // too bad we don't know why it failed
 PathIOException e = new PathIOException(target.toString());
 e.setOperation("delete");
 throw e;
   }
-  

svn commit: r1368004 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/shell/ src/test/java/org/apache/hadoop/fs/shell/

2012-08-01 Thread tgraves
Author: tgraves
Date: Wed Aug  1 14:03:35 2012
New Revision: 1368004

URL: http://svn.apache.org/viewvc?rev=1368004&view=rev
Log:
merge -r 1368002:1368003 from branch-2. FIXES: HADOOP-8633

Added:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java
  - copied unchanged from r1368003, 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java
Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1368004&r1=1368003&r2=1368004&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Wed Aug  1 14:03:35 2012
@@ -146,6 +146,9 @@ Release 0.23.3 - UNRELEASED
 org.apache.hadoop.classification.InterfaceAudience not found  (Trevor
 Robinson via tgraves)
 
+HADOOP-8633. Interrupted FsShell copies may leave tmp files (Daryn Sharp
+via tgraves)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1368004&r1=1368003&r2=1368004&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
 Wed Aug  1 14:03:35 2012
@@ -24,6 +24,8 @@ import java.io.InputStream;
 import java.util.LinkedList;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
@@ -232,31 +234,65 @@ abstract class CommandWithDestination ex
if (target.exists && (target.stat.isDirectory() || !overwrite)) {
   throw new PathExistsException(target.toString());
 }
-target.fs.setWriteChecksum(writeChecksum);
-PathData tempFile = null;
+TargetFileSystem targetFs = new TargetFileSystem(target.fs);
 try {
-  tempFile = target.createTempFile(target+"._COPYING_");
-  FSDataOutputStream out = target.fs.create(tempFile.path, true);
-  IOUtils.copyBytes(in, out, getConf(), true);
+  PathData tempTarget = target.suffix("._COPYING_");
+  targetFs.setWriteChecksum(writeChecksum);
+  targetFs.writeStreamToFile(in, tempTarget);
+  targetFs.rename(tempTarget, target);
+} finally {
+  targetFs.close(); // last ditch effort to ensure temp file is removed
+}
+  }
+
+  // Helper filter filesystem that registers created files as temp files to
+  // be deleted on exit unless successfully renamed
+  private static class TargetFileSystem extends FilterFileSystem {
+TargetFileSystem(FileSystem fs) {
+  super(fs);
+}
+
+void writeStreamToFile(InputStream in, PathData target) throws IOException {
+  FSDataOutputStream out = null;
+  try {
+out = create(target);
+IOUtils.copyBytes(in, out, getConf(), true);
+  } finally {
+IOUtils.closeStream(out); // just in case copyBytes didn't
+  }
+}
+
+// tag created files as temp files
+FSDataOutputStream create(PathData item) throws IOException {
+  try {
+return create(item.path, true);
+  } finally { // might have been created but stream was interrupted
+deleteOnExit(item.path);
+  }
+}
+
+void rename(PathData src, PathData target) throws IOException {
   // the rename method with an option to delete the target is deprecated
-  if (target.exists && !target.fs.delete(target.path, false)) {
+  if (target.exists && !delete(target.path, false)) {
 // too bad we don't know why it failed
 PathIOException e = new PathIOException(target.toString());
 

svn commit: r1368134 - in /hadoop/common/branches/branch-1: CHANGES.txt src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java

2012-08-01 Thread tucu
Author: tucu
Date: Wed Aug  1 17:29:22 2012
New Revision: 1368134

URL: http://svn.apache.org/viewvc?rev=1368134&view=rev
Log:
MAPREDUCE-4036. Streaming TestUlimit fails on CentOS 6. (tucu)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1368134&r1=1368133&r2=1368134&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Wed Aug  1 17:29:22 2012
@@ -144,6 +144,8 @@ Release 1.2.0 - unreleased
 HDFS-3163. TestHDFSCLI.testAll fails if user name is not all lowercase.
 (Brandon Li via suresh)
 
+MAPREDUCE-4036. Streaming TestUlimit fails on CentOS 6. (tucu)
+
 Release 1.1.0 - unreleased
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-1/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=1368134&r1=1368133&r2=1368134&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
 Wed Aug  1 17:29:22 2012
@@ -46,7 +46,7 @@ public class TestUlimit extends TestCase
   MiniDFSCluster dfs = null;
   MiniMRCluster mr = null;
   FileSystem fs = null;
-  private static String SET_MEMORY_LIMIT = "786432"; // 768MB
+  private static String SET_MEMORY_LIMIT = "1786432"; // 1768MB
 
   String[] genArgs(String memLimit) {
 return new String[] {
@@ -57,6 +57,8 @@ public class TestUlimit extends TestCase
  "-numReduceTasks", "0",
  "-jobconf", "mapred.map.tasks=1",
  "-jobconf", JobConf.MAPRED_MAP_TASK_ULIMIT + "=" + memLimit,
+  "-jobconf", "mapred.child.java.opts=-Xmx512m",
+  "-jobconf", "mapred.child.env=MALLOC_ARENA_MAX=1",
  "-jobconf", "mapred.job.tracker=" + "localhost:" + mr.getJobTrackerPort(),
  "-jobconf", "fs.default.name=" + "hdfs://localhost:" 
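
For context: the usual reason this test breaks on CentOS 6 is that its newer
glibc allocates per-thread malloc arenas, which inflates the child JVM's
virtual memory footprint well past the configured ulimit; pinning
MALLOC_ARENA_MAX=1 and capping the heap keeps the task within the limit. A
hedged sketch of the equivalent settings on a plain JobConf, using the same
property names as the test args above:

    import org.apache.hadoop.mapred.JobConf;

    JobConf conf = new JobConf();
    // cap the child JVM heap and the glibc arena count
    conf.set("mapred.child.java.opts", "-Xmx512m");
    conf.set("mapred.child.env", "MALLOC_ARENA_MAX=1");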




svn commit: r1368251 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt pom.xml src/CMakeLists.txt

2012-08-01 Thread eli
Author: eli
Date: Wed Aug  1 21:23:10 2012
New Revision: 1368251

URL: http://svn.apache.org/viewvc?rev=1368251&view=rev
Log:
HADOOP-8620. Add -Drequire.fuse and -Drequire.snappy. Contributed by Colin 
Patrick McCabe

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1368251&r1=1368250&r2=1368251&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Aug 
 1 21:23:10 2012
@@ -277,6 +277,9 @@ Branch-2 ( Unreleased changes )
 HADOOP-8609. IPC server logs a useless message when shutting down socket.
 (Jon Zuanich via atm)
 
+HADOOP-8620. Add -Drequire.fuse and -Drequire.snappy. (Colin
+Patrick McCabe via eli)
+
   BUG FIXES
 
 HADOOP-8372. NetUtils.normalizeHostName() incorrectly handles hostname

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml?rev=1368251&r1=1368250&r2=1368251&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml Wed Aug  1 
21:23:10 2012
@@ -31,9 +31,6 @@
   <packaging>jar</packaging>
 
   <properties>
-    <snappy.prefix>/usr/local</snappy.prefix>
-    <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-    <bundle.snappy>false</bundle.snappy>
     <kdc.resource.dir>src/test/resources/kdc</kdc.resource.dir>
     <hadoop.component>common</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
@@ -533,10 +530,10 @@
         <activeByDefault>false</activeByDefault>
       </activation>
       <properties>
-        <snappy.prefix>/usr/local</snappy.prefix>
-        <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-        <snappy.include>${snappy.prefix}/include</snappy.include>
-        <runas.home></runas.home>
+        <snappy.prefix></snappy.prefix>
+        <snappy.lib></snappy.lib>
+        <snappy.include></snappy.include>
+        <require.snappy>false</require.snappy>
       </properties>
       <build>
         <plugins>
@@ -579,9 +576,7 @@
             <configuration>
               <target>
                 <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
-                  <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
-                  <env key="CFLAGS" value="-I${snappy.include}"/>
-                  <env key="LDFLAGS" value="-L${snappy.lib}"/>
+                  <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include}"/>
                 </exec>
                 <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
                   <arg line="VERBOSE=1"/>

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt?rev=1368251&r1=1368250&r2=1368251&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt 
Wed Aug  1 21:23:10 2012
@@ -79,17 +79,26 @@ INCLUDE(CheckCSourceCompiles)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
 
-find_library(SNAPPY_LIBRARY NAMES snappy PATHS)
-find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS)
-if (SNAPPY_LIBRARY)
+find_library(SNAPPY_LIBRARY 
+NAMES snappy
+PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/lib
+  ${CUSTOM_SNAPPY_PREFIX}/lib64 ${CUSTOM_SNAPPY_LIB})
+find_path(SNAPPY_INCLUDE_DIR 
+NAMES snappy.h
+PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/include
+  ${CUSTOM_SNAPPY_INCLUDE})
+if (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
 GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
 set(SNAPPY_SOURCE_FILES
 ${D}/io/compress/snappy/SnappyCompressor.c
 ${D}/io/compress/snappy/SnappyDecompressor.c)
-else (${SNAPPY_LIBRARY})
+else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
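
In practice the new -Drequire.snappy switch turns a missing libsnappy from a
silent skip into a hard CMake failure, and the CUSTOM_SNAPPY_* variables let
the build point at a non-standard install. An illustrative invocation
(assuming the existing native profile; the prefix path here is made up):

    mvn compile -Pnative -Drequire.snappy -Dsnappy.prefix=/opt/snappy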
 

svn commit: r1368256 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt pom.xml src/CMakeLists.txt

2012-08-01 Thread eli
Author: eli
Date: Wed Aug  1 21:27:12 2012
New Revision: 1368256

URL: http://svn.apache.org/viewvc?rev=1368256&view=rev
Log:
HADOOP-8620. Add -Drequire.fuse and -Drequire.snappy. Contributed by Colin 
Patrick McCabe

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/CMakeLists.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1368256&r1=1368255&r2=1368256&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Aug  1 21:27:12 2012
@@ -377,6 +377,9 @@ Release 2.0.0-alpha - 05-23-2012
 
 HADOOP-8366 Use ProtoBuf for RpcResponseHeader (sanjay radia)
 
+HADOOP-8620. Add -Drequire.fuse and -Drequire.snappy. (Colin
+Patrick McCabe via eli)
+
   OPTIMIZATIONS
 
 HADOOP-8422. Deprecate FileSystem#getDefault* and getServerDefault

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml?rev=1368256&r1=1368255&r2=1368256&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml 
(original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml 
Wed Aug  1 21:27:12 2012
@@ -31,10 +31,6 @@
   <packaging>jar</packaging>
 
   <properties>
-    <snappy.prefix>/usr/local</snappy.prefix>
-    <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-    <bundle.snappy>false</bundle.snappy>
-
     <hadoop.component>common</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
   </properties>
@@ -524,10 +520,10 @@
         <activeByDefault>false</activeByDefault>
       </activation>
       <properties>
-        <snappy.prefix>/usr/local</snappy.prefix>
-        <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-        <snappy.include>${snappy.prefix}/include</snappy.include>
-        <runas.home></runas.home>
+        <snappy.prefix></snappy.prefix>
+        <snappy.lib></snappy.lib>
+        <snappy.include></snappy.include>
+        <require.snappy>false</require.snappy>
       </properties>
       <build>
         <plugins>
@@ -570,9 +566,7 @@
             <configuration>
               <target>
                 <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
-                  <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
-                  <env key="CFLAGS" value="-I${snappy.include}"/>
-                  <env key="LDFLAGS" value="-L${snappy.lib}"/>
+                  <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include}"/>
                 </exec>
                 <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
                   <arg line="VERBOSE=1"/>

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/CMakeLists.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/CMakeLists.txt?rev=1368256&r1=1368255&r2=1368256&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/CMakeLists.txt
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/CMakeLists.txt
 Wed Aug  1 21:27:12 2012
@@ -79,17 +79,26 @@ INCLUDE(CheckCSourceCompiles)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
 
-find_library(SNAPPY_LIBRARY NAMES snappy PATHS)
-find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS)
-if (SNAPPY_LIBRARY)
+find_library(SNAPPY_LIBRARY 
+NAMES snappy
+PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/lib
+  ${CUSTOM_SNAPPY_PREFIX}/lib64 ${CUSTOM_SNAPPY_LIB})
+find_path(SNAPPY_INCLUDE_DIR 
+NAMES snappy.h
+PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/include
+  ${CUSTOM_SNAPPY_INCLUDE})
+if (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
 GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
 set(SNAPPY_SOURCE_FILES
 ${D}/io/compress/snappy/SnappyCompressor.c
 

svn commit: r1368257 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2012-08-01 Thread eli
Author: eli
Date: Wed Aug  1 21:28:49 2012
New Revision: 1368257

URL: http://svn.apache.org/viewvc?rev=1368257&view=rev
Log:
HADOOP-8480. The native build should honor -DskipTests. Contributed by Colin 
Patrick McCabe

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1368257&r1=1368256&r2=1368257&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Aug 
 1 21:28:49 2012
@@ -374,6 +374,9 @@ Branch-2 ( Unreleased changes )
 hadoop.security.group.mapping.ldap.search.filter.user. (Jonathan Natkins
 via atm)
 
+HADOOP-8480. The native build should honor -DskipTests.
+(Colin Patrick McCabe via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
 HADOOP-8220. ZKFailoverController doesn't handle failure to become active




svn commit: r1368259 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2012-08-01 Thread eli
Author: eli
Date: Wed Aug  1 21:30:06 2012
New Revision: 1368259

URL: http://svn.apache.org/viewvc?rev=1368259&view=rev
Log:
HADOOP-8480. The native build should honor -DskipTests. Contributed by Colin 
Patrick McCabe

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1368259&r1=1368258&r2=1368259&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Aug  1 21:30:06 2012
@@ -171,6 +171,9 @@ Release 2.0.1-alpha - UNRELEASED
 hadoop.security.group.mapping.ldap.search.filter.user. (Jonathan Natkins
 via atm)
 
+HADOOP-8480. The native build should honor -DskipTests.
+(Colin Patrick McCabe via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
 HADOOP-8220. ZKFailoverController doesn't handle failure to become active




[Hadoop Wiki] Update of ContributorsGroup by SteveLoughran

2012-08-01 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on Hadoop Wiki for change 
notification.

The ContributorsGroup page has been changed by SteveLoughran:
http://wiki.apache.org/hadoop/ContributorsGroup?action=diff&rev1=34&rev2=35

Comment:
add user Denis

   * DavidMarin
   * DavidMenestrina
   * DavidWang
+  * Denis
   * dhrubaBorthakur
   * DhrubaBorthakur
   * DjoerdHiemstra


svn commit: r1368350 - in /hadoop/common/branches/branch-1: ./ src/hdfs/org/apache/hadoop/hdfs/ src/hdfs/org/apache/hadoop/hdfs/web/ src/hdfs/org/apache/hadoop/hdfs/web/resources/ src/test/org/apache/

2012-08-01 Thread szetszwo
Author: szetszwo
Date: Thu Aug  2 04:26:53 2012
New Revision: 1368350

URL: http://svn.apache.org/viewvc?rev=1368350&view=rev
Log:
HDFS-3667.  Add retry support to WebHdfsFileSystem.

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/ByteRangeInputStream.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/web/resources/PostOpParam.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/web/resources/PutOpParam.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/web/TestOffsetUrlInputStream.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/web/TestWebHDFS.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1368350&r1=1368349&r2=1368350&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Thu Aug  2 04:26:53 2012
@@ -58,6 +58,8 @@ Release 1.2.0 - unreleased
 JobTracker for smaller clusters. (Todd Lipcon, backport by 
 Brandon Li via sseth)
 
+HDFS-3667.  Add retry support to WebHdfsFileSystem.  (szetszwo)
+
   OPTIMIZATIONS
 
 HDFS-2533. Backport: Remove needless synchronization on some FSDataSet

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/ByteRangeInputStream.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/ByteRangeInputStream.java?rev=1368350&r1=1368349&r2=1368350&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/ByteRangeInputStream.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/ByteRangeInputStream.java
 Thu Aug  2 04:26:53 2012
@@ -54,9 +54,9 @@ public abstract class ByteRangeInputStre
   return url;
 }
 
-protected abstract HttpURLConnection openConnection() throws IOException;
-
-protected abstract HttpURLConnection openConnection(final long offset) throws IOException;
+/** Connect to server with a data offset. */
+protected abstract HttpURLConnection connect(final long offset,
+final boolean resolved) throws IOException;
   }
 
   enum StreamStatus {
@@ -82,9 +82,6 @@ public abstract class ByteRangeInputStre
 this.resolvedURL = r;
   }
   
-  protected abstract void checkResponseCode(final HttpURLConnection connection
-  ) throws IOException;
-  
   protected abstract URL getResolvedUrl(final HttpURLConnection connection
   ) throws IOException;
 
@@ -98,13 +95,10 @@ public abstract class ByteRangeInputStre
   
   // Use the original url if no resolved url exists, eg. if
   // it's the first time a request is made.
-  final URLOpener opener =
-(resolvedURL.getURL() == null) ? originalURL : resolvedURL;
-
-  final HttpURLConnection connection = opener.openConnection(startPos);
-  connection.connect();
-  checkResponseCode(connection);
+  final boolean resolved = resolvedURL.getURL() != null; 
+  final URLOpener opener = resolved? resolvedURL: originalURL;
 
+  final HttpURLConnection connection = opener.connect(startPos, resolved);
   final String cl = connection.getHeaderField(StreamFile.CONTENT_LENGTH);
   filelength = (cl == null) ? -1 : Long.parseLong(cl);
   in = connection.getInputStream();
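
The refactor folds openConnection() and checkResponseCode() into a single
connect(offset, resolved) hook, so retry logic can tell a first attempt
apart from a request against an already-resolved redirect URL. A sketch of
what an implementing opener might look like (hypothetical subclass; only
the connect signature comes from the diff above):

    // Hypothetical URLOpener subclass; the body is illustrative.
    class RangeOpener extends URLOpener {
      RangeOpener(URL url) {
        super(url);
      }

      @Override
      protected HttpURLConnection connect(final long offset,
          final boolean resolved) throws IOException {
        final HttpURLConnection conn =
            (HttpURLConnection) getURL().openConnection();
        if (offset > 0) {
          // resume from the given byte offset
          conn.setRequestProperty("Range", "bytes=" + offset + "-");
        }
        conn.connect();
        return conn;
      }
    }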

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=1368350&r1=1368349&r2=1368350&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java 
(original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java 
Thu Aug  2 04:26:53 2012
@@ -142,7 +142,7 @@ public class DFSClient implements FSCons
* 
* Note 

svn commit: r1368353 - in /hadoop/common/branches/branch-1: ./ src/hdfs/ src/hdfs/org/apache/hadoop/hdfs/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/test/org/apache/hadoop/hdfs/

2012-08-01 Thread szetszwo
Author: szetszwo
Date: Thu Aug  2 05:22:43 2012
New Revision: 1368353

URL: http://svn.apache.org/viewvc?rev=1368353&view=rev
Log:
HDFS-528. Backport: Add ability for safemode to wait for a minimum number of 
live datanodes. 

Modified:
hadoop/common/branches/branch-1/CHANGES.txt
hadoop/common/branches/branch-1/src/hdfs/hdfs-default.xml

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/TestSafeMode.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1368353&r1=1368352&r2=1368353&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Thu Aug  2 05:22:43 2012
@@ -19,6 +19,9 @@ Release 1.2.0 - unreleased
 
 MAPREDUCE-987. Exposing MiniDFS and MiniMR clusters as a single process 
command-line (philip and ahmed via tucu)
 
+HDFS-528. Backport: Add ability for safemode to wait for a minimum number
+of live datanodes.  (szetszwo)
+
   IMPROVEMENTS
 
 HDFS-3515. Port HDFS-1457 to branch-1. (eli)

Modified: hadoop/common/branches/branch-1/src/hdfs/hdfs-default.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/hdfs-default.xml?rev=1368353&r1=1368352&r2=1368353&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/src/hdfs/hdfs-default.xml (original)
+++ hadoop/common/branches/branch-1/src/hdfs/hdfs-default.xml Thu Aug  2 
05:22:43 2012
@@ -304,9 +304,24 @@ creations/deletions), or "all".</descrip
  <description>
    Specifies the percentage of blocks that should satisfy 
    the minimal replication requirement defined by dfs.replication.min.
-    Values less than or equal to 0 mean not to start in safe mode.
+    Values less than or equal to 0 mean not to wait for any particular
+    percentage of blocks before exiting safemode.
    Values greater than 1 will make safe mode permanent.
  </description>
 </property>
 
+<property>
+  <name>dfs.namenode.safemode.min.datanodes</name>
+  <value>0</value>
+  <description>
+    Specifies the number of datanodes that must be considered alive
+    before the name node exits safemode.
+    Values less than or equal to 0 mean not to take the number of live
+    datanodes into account when deciding whether to remain in safe mode
+    during startup.
+    Values greater than the number of datanodes in the cluster
+    will make safe mode permanent.
+  </description>
 </property>
 
 <property>
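
The new key can also be set programmatically in tests or client configs;
for example (illustrative snippet, key string taken from the patch):

    import org.apache.hadoop.conf.Configuration;

    Configuration conf = new Configuration();
    // require at least 2 live datanodes before the namenode leaves safemode
    conf.setInt("dfs.namenode.safemode.min.datanodes", 2);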

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1368353&r1=1368352&r2=1368353&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
 Thu Aug  2 05:22:43 2012
@@ -72,6 +72,8 @@ public class DFSConfigKeys extends Commo
   public static final int DFS_NAMENODE_SAFEMODE_EXTENSION_DEFAULT = 3;
  public static final String  DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY = "dfs.namenode.safemode.threshold-pct";
  public static final float   DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_DEFAULT = 0.999f;
+  public static final String  DFS_NAMENODE_SAFEMODE_MIN_DATANODES_KEY = "dfs.namenode.safemode.min.datanodes";
+  public static final int DFS_NAMENODE_SAFEMODE_MIN_DATANODES_DEFAULT = 0;
  public static final String  DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY = "dfs.namenode.secondary.http-address";
  public static final String  DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_DEFAULT = "0.0.0.0:50090";
  public static final String  DFS_NAMENODE_CHECKPOINT_PERIOD_KEY = "dfs.namenode.checkpoint.period";

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1368353&r1=1368352&r2=1368353&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 Thu Aug  2 05:22:43 2012
@@ -2532,6 +2532,10 @@ public class FSNamesystem implements FSC
   // no need to update its timestamp
   // because its is done when the descriptor is created
 }
+
+if (safeMode != null) {
+