svn commit: r1390843 - in /hadoop/common/branches/branch-2/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-common/ hadoop-common/src/main/java

2012-09-26 Thread eli
Author: eli
Date: Thu Sep 27 05:08:06 2012
New Revision: 1390843

URL: http://svn.apache.org/viewvc?rev=1390843&view=rev
Log:
HADOOP-8855. SSL-based image transfer does not work when Kerberos is disabled. 
Contributed by Todd Lipcon

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java?rev=1390843&r1=1390842&r2=1390843&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 Thu Sep 27 05:08:06 2012
@@ -19,6 +19,8 @@ import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -44,6 +46,9 @@ import java.util.Map;
  * sequence.
  */
 public class KerberosAuthenticator implements Authenticator {
+  
+  private static Logger LOG = LoggerFactory.getLogger(
+  KerberosAuthenticator.class);
 
   /**
* HTTP header used by the SPNEGO server endpoint during an authentication 
sequence.
@@ -152,9 +157,18 @@ public class KerberosAuthenticator imple
   }
   conn.setRequestMethod(AUTH_HTTP_METHOD);
   conn.connect();
-  if (isNegotiate()) {
+  
+  if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+LOG.debug("JDK performed authentication on our behalf.");
+// If the JDK already did the SPNEGO back-and-forth for
+// us, just pull out the token.
+AuthenticatedURL.extractToken(conn, token);
+return;
+  } else if (isNegotiate()) {
+LOG.debug("Performing our own SPNEGO sequence.");
 doSpnegoSequence(token);
   } else {
+LOG.debug("Using fallback authenticator sequence.");
 getFallBackAuthenticator().authenticate(url, token);
   }
 }
@@ -168,7 +182,11 @@ public class KerberosAuthenticator imple
* @return the fallback {@link Authenticator}.
*/
   protected Authenticator getFallBackAuthenticator() {
-return new PseudoAuthenticator();
+Authenticator auth = new PseudoAuthenticator();
+if (connConfigurator != null) {
+  auth.setConnectionConfigurator(connConfigurator);
+}
+return auth;
   }
 
   /*
@@ -197,11 +215,16 @@ public class KerberosAuthenticator imple
   AccessControlContext context = AccessController.getContext();
   Subject subject = Subject.getSubject(context);
   if (subject == null) {
+LOG.debug("No subject in context, logging in");
 subject = new Subject();
 LoginContext login = new LoginContext("", subject,
 null, new KerberosConfiguration());
 login.login();
   }
+
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Using subject: " + subject);
+  }
   Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
 
 @Override
@@ -257,6 +280,7 @@ public class KerberosAuthenticator imple
   * Sends the Kerberos token to the server.
   */
   private void sendToken(byte[] outToken) throws IOException, 
AuthenticationException {
+new Exception("sendToken").printStackTrace(System.out);
 String token = base64.encodeToString(outToken);
 conn = (HttpURLConnection) url.openConnection();
 if (connConfigurator != null) {

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390843&r1=1390842&r2=1390843&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Sep 27 05:08:06 2012
@@ -41,6 +41,9 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-8791. Fix rm command documentation to indicate it deletes
 files and not directories. (Jing Zhao via suresh)
 

svn commit: r1390841 - in /hadoop/common/trunk/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-common/ hadoop-common/src/main/java/org/apache/

2012-09-26 Thread eli
Author: eli
Date: Thu Sep 27 05:03:42 2012
New Revision: 1390841

URL: http://svn.apache.org/viewvc?rev=1390841&view=rev
Log:
HADOOP-8855. SSL-based image transfer does not work when Kerberos is disabled. 
Contributed by Todd Lipcon

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java?rev=1390841&r1=1390840&r2=1390841&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 Thu Sep 27 05:03:42 2012
@@ -19,6 +19,8 @@ import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -44,6 +46,9 @@ import java.util.Map;
  * sequence.
  */
 public class KerberosAuthenticator implements Authenticator {
+  
+  private static Logger LOG = LoggerFactory.getLogger(
+  KerberosAuthenticator.class);
 
   /**
* HTTP header used by the SPNEGO server endpoint during an authentication 
sequence.
@@ -152,9 +157,18 @@ public class KerberosAuthenticator imple
   }
   conn.setRequestMethod(AUTH_HTTP_METHOD);
   conn.connect();
-  if (isNegotiate()) {
+  
+  if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+LOG.debug("JDK performed authentication on our behalf.");
+// If the JDK already did the SPNEGO back-and-forth for
+// us, just pull out the token.
+AuthenticatedURL.extractToken(conn, token);
+return;
+  } else if (isNegotiate()) {
+LOG.debug("Performing our own SPNEGO sequence.");
 doSpnegoSequence(token);
   } else {
+LOG.debug("Using fallback authenticator sequence.");
 getFallBackAuthenticator().authenticate(url, token);
   }
 }
@@ -168,7 +182,11 @@ public class KerberosAuthenticator imple
* @return the fallback {@link Authenticator}.
*/
   protected Authenticator getFallBackAuthenticator() {
-return new PseudoAuthenticator();
+Authenticator auth = new PseudoAuthenticator();
+if (connConfigurator != null) {
+  auth.setConnectionConfigurator(connConfigurator);
+}
+return auth;
   }
 
   /*
@@ -197,11 +215,16 @@ public class KerberosAuthenticator imple
   AccessControlContext context = AccessController.getContext();
   Subject subject = Subject.getSubject(context);
   if (subject == null) {
+LOG.debug("No subject in context, logging in");
 subject = new Subject();
 LoginContext login = new LoginContext("", subject,
 null, new KerberosConfiguration());
 login.login();
   }
+
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Using subject: " + subject);
+  }
   Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
 
 @Override

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390841&r1=1390840&r2=1390841&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Sep 
27 05:03:42 2012
@@ -579,6 +579,8 @@ Release 2.0.2-alpha - 2012-09-07 
 
 HADOOP-8781. hadoop-config.sh should add JAVA_LIBRARY_PATH to 
LD_LIBRARY_PATH. (tucu)
 
+HADOOP-8855. SSL-based image transfer does not work when Kerberos is 
disabled. (todd via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
 HADOOP-8220. ZKFailoverController doesn't handle failure to become active

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1390841&r1=1390840&r2=1390841&view=diff
===

svn commit: r1390764 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/native/acinclude.m4 src/native/configure.ac

2012-09-26 Thread suresh
Author: suresh
Date: Wed Sep 26 22:56:46 2012
New Revision: 1390764

URL: http://svn.apache.org/viewvc?rev=1390764&view=rev
Log:
HADOOP-7868. Merging change r1353691 from branch-1 to branch-1-win.

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt
hadoop/common/branches/branch-1-win/src/native/acinclude.m4
hadoop/common/branches/branch-1-win/src/native/configure.ac

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1390764&r1=1390763&r2=1390764&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Wed Sep 26 22:56:46 2012
@@ -8,6 +8,9 @@ Release 1.2.0 - unreleased
 module external to HDFS to specify how HDFS blocks should be placed.
 (Sumadhur Reddy Bolli via szetszwo)
  
+HADOOP-7868. Hadoop native fails to compile when default linker
+option is -Wl,--as-needed. (Trevor Robinson via eli)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: hadoop/common/branches/branch-1-win/src/native/acinclude.m4
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/acinclude.m4?rev=1390764&r1=1390763&r2=1390764&view=diff
==
--- hadoop/common/branches/branch-1-win/src/native/acinclude.m4 (original)
+++ hadoop/common/branches/branch-1-win/src/native/acinclude.m4 Wed Sep 26 
22:56:46 2012
@@ -1,4 +1,4 @@
-# AC_COMPUTE_NEEDED_DSO(LIBRARY, PREPROC_SYMBOL)
+# AC_COMPUTE_NEEDED_DSO(LIBRARY, TEST_PROGRAM, PREPROC_SYMBOL)
 # --
 # Compute the 'actual' dynamic-library used 
 # for LIBRARY and set it to PREPROC_SYMBOL
@@ -6,7 +6,7 @@ AC_DEFUN([AC_COMPUTE_NEEDED_DSO],
 [
 AC_CACHE_CHECK([Checking for the 'actual' dynamic-library for '-l$1'], 
ac_cv_libname_$1,
   [
-  echo 'int main(int argc, char **argv){return 0;}' > conftest.c
+  echo '$2' > conftest.c
   if test -z "`${CC} ${LDFLAGS} -o conftest conftest.c -l$1 2>&1`"; then
 dnl Try objdump and ldd in that order to get the dynamic library
 if test ! -z "`which objdump | grep -v 'no objdump'`"; then
@@ -22,5 +22,5 @@ AC_CACHE_CHECK([Checking for the 'actual
   rm -f conftest*
   ]
 )
-AC_DEFINE_UNQUOTED($2, ${ac_cv_libname_$1}, [The 'actual' dynamic-library for 
'-l$1'])
+AC_DEFINE_UNQUOTED($3, ${ac_cv_libname_$1}, [The 'actual' dynamic-library for 
'-l$1'])
 ])# AC_COMPUTE_NEEDED_DSO

Modified: hadoop/common/branches/branch-1-win/src/native/configure.ac
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/configure.ac?rev=1390764&r1=1390763&r2=1390764&view=diff
==
--- hadoop/common/branches/branch-1-win/src/native/configure.ac (original)
+++ hadoop/common/branches/branch-1-win/src/native/configure.ac Wed Sep 26 
22:56:46 2012
@@ -87,10 +87,20 @@ CPPFLAGS=$cppflags_bak
 AC_SUBST([JNI_CPPFLAGS])
 
 dnl Check for zlib headers
-AC_CHECK_HEADERS([zlib.h zconf.h], 
AC_COMPUTE_NEEDED_DSO(z,HADOOP_ZLIB_LIBRARY), AC_MSG_ERROR(Zlib headers were 
not found... native-hadoop library needs zlib to build. Please install the 
requisite zlib development package.))
+AC_CHECK_HEADERS([zlib.h zconf.h],
+  AC_COMPUTE_NEEDED_DSO(z,
+[#include "zlib.h"
+int main(int argc, char **argv){zlibVersion();return 0;}],
+HADOOP_ZLIB_LIBRARY),
+  AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib 
to build. Please install the requisite zlib development package.))
 
 dnl Check for snappy headers
-AC_CHECK_HEADERS([snappy-c.h], 
AC_COMPUTE_NEEDED_DSO(snappy,HADOOP_SNAPPY_LIBRARY), AC_MSG_WARN(Snappy headers 
were not found... building without snappy.))
+AC_CHECK_HEADERS([snappy-c.h],
+  AC_COMPUTE_NEEDED_DSO(snappy,
+[#include "snappy-c.h"
+int main(int argc, char **argv){snappy_compress(0,0,0,0);return 0;}],
+HADOOP_SNAPPY_LIBRARY),
+  AC_MSG_WARN(Snappy headers were not found... building without snappy.))
 
 dnl Check for headers needed by the native Group resolution implementation
 AC_CHECK_HEADERS([fcntl.h stdlib.h string.h unistd.h], [], AC_MSG_ERROR(Some 
system headers not found... please ensure their presence on your platform.))




svn commit: r1390763 [3/3] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ src/ src/contrib/bash-tab-completion/ src/main/bin/ src/main/docs/ src/main/docs/src/do

2012-09-26 Thread suresh
Modified: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==
--- 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
 (original)
+++ 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
 Wed Sep 26 22:55:00 2012
@@ -175,8 +175,9 @@ public class TestMultipleProtocolServer 
   @Before
   public void setUp() throws Exception {
 // create a server with two handlers
-server = RPC.getServer(Foo0.class,
-  new Foo0Impl(), ADDRESS, 0, 2, false, conf, 
null);
+server = new RPC.Builder(conf).setProtocol(Foo0.class)
+.setInstance(new Foo0Impl()).setBindAddress(ADDRESS).setPort(0)
+.setNumHandlers(2).setVerbose(false).build();
 server.addProtocol(RPC.RpcKind.RPC_WRITABLE, Foo1.class, new Foo1Impl());
 server.addProtocol(RPC.RpcKind.RPC_WRITABLE, Bar.class, new BarImpl());
 server.addProtocol(RPC.RpcKind.RPC_WRITABLE, Mixin.class, new BarImpl());
@@ -263,8 +264,9 @@ public class TestMultipleProtocolServer 
   
   @Test(expected=IOException.class)
   public void testIncorrectServerCreation() throws IOException {
-RPC.getServer(Foo1.class,
-new Foo0Impl(), ADDRESS, 0, 2, false, conf, null);
+new RPC.Builder(conf).setProtocol(Foo1.class).setInstance(new Foo0Impl())
+.setBindAddress(ADDRESS).setPort(0).setNumHandlers(2).setVerbose(false)
+.build();
   } 
   
   // Now test a PB service - a server  hosts both PB and Writable Rpcs.

Modified: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==
--- 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
 (original)
+++ 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
 Wed Sep 26 22:55:00 2012
@@ -113,7 +113,8 @@ public class TestProtoBufRpc {
 .newReflectiveBlockingService(serverImpl);
 
 // Get RPC server for server side implementation
-server = RPC.getServer(TestRpcService.class, service, ADDRESS, PORT, conf);
+server = new RPC.Builder(conf).setProtocol(TestRpcService.class)
+.setInstance(service).setBindAddress(ADDRESS).setPort(PORT).build();
 addr = NetUtils.getConnectAddress(server);
 
 // now the second protocol

Modified: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==
--- 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
 (original)
+++ 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
 Wed Sep 26 22:55:00 2012
@@ -314,8 +314,9 @@ public class TestRPC {
   
   @Test
   public void testConfRpc() throws Exception {
-Server server = RPC.getServer(TestProtocol.class,
-  new TestImpl(), ADDRESS, 0, 1, false, conf, 
null);
+Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
+.setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
+.setNumHandlers(1).setVerbose(false).build();
 // Just one handler
 int confQ = conf.getInt(
   CommonConfigurationKeys.IPC_SERVER_HANDLER_QUEUE_SIZE_KEY,
@@ -328,8 +329,11 @@ public class TestRPC {
 assertEquals(confReaders, server.getNumReaders());
 server.stop();
 
-server = RPC.getServer(TestProtocol.class,
-  new TestImpl(), ADDRESS, 0, 1, 3, 200, 
false, conf, null);
+server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
+.setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
+.setNumHandlers(1).setnumReaders(3).setQueue

svn commit: r1390763 - in /hadoop/common/branches/branch-trunk-win: ./ dev-support/ hadoop-client/ hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-distcp/src/ma

2012-09-26 Thread suresh
Author: suresh
Date: Wed Sep 26 22:55:00 2012
New Revision: 1390763

URL: http://svn.apache.org/viewvc?rev=1390763&view=rev
Log:
Merging all the trunk changes into branch-trunk-win branch

Added:
hadoop/common/branches/branch-trunk-win/dev-support/relnotes.py
  - copied unchanged from r1390762, 
hadoop/common/trunk/dev-support/relnotes.py
Modified:
hadoop/common/branches/branch-trunk-win/   (props changed)
hadoop/common/branches/branch-trunk-win/BUILDING.txt
hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh
hadoop/common/branches/branch-trunk-win/hadoop-client/pom.xml

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java

hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java

Propchange: hadoop/common/branches/branch-trunk-win/
--
  Merged /hadoop/common/trunk:r1379224-1390762

Modified: hadoop/common/branches/branch-trunk-win/BUILDING.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/BUILDING.txt?rev=1390763&r1=1390762&r2=1390763&view=diff
==
--- hadoop/common/branches/branch-trunk-win/BUILDING.txt (original)
+++ hadoop/common/branches/branch-trunk-win/BUILDING.txt Wed Sep 26 22:55:00 
2012
@@ -54,12 +54,32 @@ Maven build goals:
  Build options:
 
   * Use -Pnative to compile/bundle native code
-  * Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
-Snappy JNI bindings and to bundle Snappy SO files
   * Use -Pdocs to generate & bundle the documentation in the distribution 
(using -Pdist)
   * Use -Psrc to create a project source TAR.GZ
   * Use -Dtar to create a TAR with the distribution (using -Pdist)
 
+ Snappy build options:
+
+   Snappy is a compression library that can be utilized by the native code.
+   It is currently an optional component, meaning that Hadoop can be built with
+   or without this dependency.
+
+  * Use -Drequire.snappy to fail the build if libsnappy.so is not found.
+If this option is not specified and the snappy library is missing,
+we silently build a version of libhadoop.so that cannot make use of snappy.
+This option is recommended if you plan on making use of snappy and want
+to get more repeatable builds.
+
+  * Use -Dsnappy.prefix to specify a nonstandard location for the libsnappy
+header files and library files. You do not need this option if you have
+installed snappy using a package manager.
+  * Use -Dsnappy.lib to specify a nonstandard location for the libsnappy 
library
+files.  Similarly to snappy.prefix, you do not need this option if you have
+installed snappy using a package manager.
+  * Use -Dbundle.snappy to copy the contents of the snappy.lib directory into
+the final tar file. This option requires that -Dsnappy.lib is also given,
+and it ignores the -Dsnappy.prefix option.
+
Tests options:
 
   * Use -DskipTests to skip tests when running the following Maven goals:

Modified: hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh?rev=1390763&r1=1390762&r2=1390763&view=diff
==
--- hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh Wed Sep 
26 22:55:00 2012
@@ -250,7 +250,7 @@ verifyPatch () {
 echo "PATCH APPLICATION FAILED"
 JIRA_COMMENT="$JIRA_COMMENT
 
--1 patch.  The patch command could not apply the patch."
+{color:red}-1 patch{color}.  The patch command could not apply the patch."
 return 1
   else
 return 0
@@ -305,12 +305,12 @@ checkAuthor () {
   if [[ $authorTags != 0 ]] ; then
 JIRA_COMMENT="$JIRA_COMMENT
 
--1 @author.  The patch appears to contain $authorTags @author tags which 
the Hadoop community has agreed to not allow in

svn commit: r1390731 - in /hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop: fs/TrashPolicyDefault.java security/SecurityUtil.java

2012-09-26 Thread eli
Author: eli
Date: Wed Sep 26 21:27:03 2012
New Revision: 1390731

URL: http://svn.apache.org/viewvc?rev=1390731&view=rev
Log:
HDFS-3972. Trash emptier fails in secure HA cluster. Contributed by Todd Lipcon

Modified:

hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

Modified: 
hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1390731&r1=1390730&r2=1390731&view=diff
==
--- 
hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Wed Sep 26 21:27:03 2012
@@ -69,8 +69,9 @@ public class TrashPolicyDefault extends 
 
   public TrashPolicyDefault() { }
 
-  private TrashPolicyDefault(Path home, Configuration conf) throws IOException 
{
-initialize(conf, home.getFileSystem(conf), home);
+  private TrashPolicyDefault(FileSystem fs, Path home, Configuration conf)
+  throws IOException {
+initialize(conf, fs, home);
   }
 
   @Override
@@ -278,7 +279,8 @@ public class TrashPolicyDefault extends 
   if (!home.isDirectory())
 continue;
   try {
-TrashPolicyDefault trash = new 
TrashPolicyDefault(home.getPath(), conf);
+TrashPolicyDefault trash = new TrashPolicyDefault(
+fs, home.getPath(), conf);
 trash.deleteCheckpoint();
 trash.createCheckpoint();
   } catch (IOException e) {

Modified: 
hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1390731&r1=1390730&r2=1390731&view=diff
==
--- 
hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
 (original)
+++ 
hadoop/common/branches/branch-2.0.2-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
 Wed Sep 26 21:27:03 2012
@@ -25,6 +25,7 @@ import java.net.URLConnection;
 import java.net.UnknownHostException;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
 import java.util.ServiceLoader;
@@ -451,6 +452,41 @@ public class SecurityUtil {
   return action.run();
 }
   }
+  
+  /**
+   * Perform the given action as the daemon's login user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
+  throws IOException {
+return doAsUser(UserGroupInformation.getLoginUser(), action);
+  }
+
+  /**
+   * Perform the given action as the daemon's current user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsCurrentUser(PrivilegedExceptionAction<T> action)
+  throws IOException {
+return doAsUser(UserGroupInformation.getCurrentUser(), action);
+  }
+
+  private static <T> T doAsUser(UserGroupInformation ugi,
+  PrivilegedExceptionAction<T> action) throws IOException {
+try {
+  return ugi.doAs(action);
+} catch (InterruptedException ie) {
+  throw new IOException(ie);
+}
+  }
 
   /**
* Open a (if need be) secure connection to a URL in a secure environment




svn commit: r1390730 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop: fs/TrashPolicyDefault.java security/SecurityUtil.java

2012-09-26 Thread eli
Author: eli
Date: Wed Sep 26 21:26:54 2012
New Revision: 1390730

URL: http://svn.apache.org/viewvc?rev=1390730&view=rev
Log:
HDFS-3972. Trash emptier fails in secure HA cluster. Contributed by Todd Lipcon

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1390730&r1=1390729&r2=1390730&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Wed Sep 26 21:26:54 2012
@@ -72,8 +72,9 @@ public class TrashPolicyDefault extends 
 
   public TrashPolicyDefault() { }
 
-  private TrashPolicyDefault(Path home, Configuration conf) throws IOException 
{
-initialize(conf, home.getFileSystem(conf), home);
+  private TrashPolicyDefault(FileSystem fs, Path home, Configuration conf)
+  throws IOException {
+initialize(conf, fs, home);
   }
 
   @Override
@@ -279,7 +280,8 @@ public class TrashPolicyDefault extends 
   if (!home.isDirectory())
 continue;
   try {
-TrashPolicyDefault trash = new 
TrashPolicyDefault(home.getPath(), conf);
+TrashPolicyDefault trash = new TrashPolicyDefault(
+fs, home.getPath(), conf);
 trash.deleteCheckpoint();
 trash.createCheckpoint();
   } catch (IOException e) {

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1390730&r1=1390729&r2=1390730&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
 Wed Sep 26 21:26:54 2012
@@ -25,6 +25,7 @@ import java.net.URLConnection;
 import java.net.UnknownHostException;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
 import java.util.ServiceLoader;
@@ -451,6 +452,41 @@ public class SecurityUtil {
   return action.run();
 }
   }
+  
+  /**
+   * Perform the given action as the daemon's login user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
+      throws IOException {
+    return doAsUser(UserGroupInformation.getLoginUser(), action);
+  }
+
+  /**
+   * Perform the given action as the daemon's current user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsCurrentUser(PrivilegedExceptionAction<T> action)
+      throws IOException {
+    return doAsUser(UserGroupInformation.getCurrentUser(), action);
+  }
+
+  private static <T> T doAsUser(UserGroupInformation ugi,
+      PrivilegedExceptionAction<T> action) throws IOException {
+    try {
+      return ugi.doAs(action);
+    } catch (InterruptedException ie) {
+      throw new IOException(ie);
+    }
+  }
 
   /**
* Open a (if need be) secure connection to a URL in a secure environment




svn commit: r1390729 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop: fs/TrashPolicyDefault.java security/SecurityUtil.java

2012-09-26 Thread eli
Author: eli
Date: Wed Sep 26 21:25:04 2012
New Revision: 1390729

URL: http://svn.apache.org/viewvc?rev=1390729&view=rev
Log:
HDFS-3972. Trash emptier fails in secure HA cluster. Contributed by Todd Lipcon

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1390729&r1=1390728&r2=1390729&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Wed Sep 26 21:25:04 2012
@@ -72,8 +72,9 @@ public class TrashPolicyDefault extends 
 
   public TrashPolicyDefault() { }
 
-  private TrashPolicyDefault(Path home, Configuration conf) throws IOException 
{
-initialize(conf, home.getFileSystem(conf), home);
+  private TrashPolicyDefault(FileSystem fs, Path home, Configuration conf)
+  throws IOException {
+initialize(conf, fs, home);
   }
 
   @Override
@@ -279,7 +280,8 @@ public class TrashPolicyDefault extends 
   if (!home.isDirectory())
 continue;
   try {
-TrashPolicyDefault trash = new 
TrashPolicyDefault(home.getPath(), conf);
+TrashPolicyDefault trash = new TrashPolicyDefault(
+fs, home.getPath(), conf);
 trash.deleteCheckpoint();
 trash.createCheckpoint();
   } catch (IOException e) {

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1390729&r1=1390728&r2=1390729&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
 Wed Sep 26 21:25:04 2012
@@ -25,6 +25,7 @@ import java.net.URLConnection;
 import java.net.UnknownHostException;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
 import java.util.ServiceLoader;
@@ -451,6 +452,41 @@ public class SecurityUtil {
   return action.run();
 }
   }
+  
+  /**
+   * Perform the given action as the daemon's login user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
+      throws IOException {
+    return doAsUser(UserGroupInformation.getLoginUser(), action);
+  }
+
+  /**
+   * Perform the given action as the daemon's current user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsCurrentUser(PrivilegedExceptionAction<T> action)
+      throws IOException {
+    return doAsUser(UserGroupInformation.getCurrentUser(), action);
+  }
+
+  private static <T> T doAsUser(UserGroupInformation ugi,
+      PrivilegedExceptionAction<T> action) throws IOException {
+    try {
+      return ugi.doAs(action);
+    } catch (InterruptedException ie) {
+      throw new IOException(ie);
+    }
+  }
 
   /**
* Open a (if need be) secure connection to a URL in a secure environment




svn commit: r1390680 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/io/WritableComparator.java

2012-09-26 Thread bobby
Author: bobby
Date: Wed Sep 26 19:10:40 2012
New Revision: 1390680

URL: http://svn.apache.org/viewvc?rev=1390680&view=rev
Log:
svn merge -c 1379506 FIXES: HADOOP-8684. Deadlock between WritableComparator 
and WritableComparable. Contributed by Jing Zhao

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390680&r1=1390679&r2=1390680&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Sep 26 19:10:40 2012
@@ -750,6 +750,9 @@ Release 0.23.4 - UNRELEASED
 HADOOP-8843. Old trash directories are never deleted on upgrade
 from 1.x (jlowe)
 
+HADOOP-8684. Deadlock between WritableComparator and WritableComparable.
+(Jing Zhao via suresh)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1390680&r1=1390679&r2=1390680&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 Wed Sep 26 19:10:40 2012
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
-import java.util.*;
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -38,12 +39,11 @@ import org.apache.hadoop.util.Reflection
 @InterfaceStability.Stable
 public class WritableComparator implements RawComparator {
 
-  private static HashMap<Class, WritableComparator> comparators =
-    new HashMap<Class, WritableComparator>(); // registry
+  private static final ConcurrentHashMap<Class, WritableComparator> comparators
+      = new ConcurrentHashMap<Class, WritableComparator>(); // registry
 
   /** Get a comparator for a {@link WritableComparable} implementation. */
-  public static synchronized 
-  WritableComparator get(Class<? extends WritableComparable> c) {
+  public static WritableComparator get(Class<? extends WritableComparable> c) {
 WritableComparator comparator = comparators.get(c);
 if (comparator == null) {
   // force the static initializers to run
@@ -76,12 +76,10 @@ public class WritableComparator implemen
   /** Register an optimized comparator for a {@link WritableComparable}
* implementation. Comparators registered with this method must be
* thread-safe. */
-  public static synchronized void define(Class c,
- WritableComparator comparator) {
+  public static void define(Class c, WritableComparator comparator) {
 comparators.put(c, comparator);
   }
 
-
   private final Class keyClass;
   private final WritableComparable key1;
   private final WritableComparable key2;




svn commit: r1390677 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/io/WritableComparator.java

2012-09-26 Thread bobby
Author: bobby
Date: Wed Sep 26 19:06:11 2012
New Revision: 1390677

URL: http://svn.apache.org/viewvc?rev=1390677&view=rev
Log:
svn merge -c 1379506 FIXES: HADOOP-8684. Deadlock between WritableComparator 
and WritableComparable. Contributed by Jing Zhao

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390677&r1=1390676&r2=1390677&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Wed Sep 26 19:06:11 2012
@@ -17,6 +17,9 @@ Release 0.23.4 - UNRELEASED
 HADOOP-8843. Old trash directories are never deleted on upgrade
 from 1.x (jlowe)
 
+HADOOP-8684. Deadlock between WritableComparator and WritableComparable.
+(Jing Zhao via suresh)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1390677&r1=1390676&r2=1390677&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
 Wed Sep 26 19:06:11 2012
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
-import java.util.*;
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -38,12 +39,11 @@ import org.apache.hadoop.util.Reflection
 @InterfaceStability.Stable
 public class WritableComparator implements RawComparator {
 
-  private static HashMap<Class, WritableComparator> comparators =
-    new HashMap<Class, WritableComparator>(); // registry
+  private static final ConcurrentHashMap<Class, WritableComparator> comparators
+      = new ConcurrentHashMap<Class, WritableComparator>(); // registry
 
   /** Get a comparator for a {@link WritableComparable} implementation. */
-  public static synchronized 
-  WritableComparator get(Class<? extends WritableComparable> c) {
+  public static WritableComparator get(Class<? extends WritableComparable> c) {
 WritableComparator comparator = comparators.get(c);
 if (comparator == null) {
   // force the static initializers to run
@@ -76,12 +76,10 @@ public class WritableComparator implemen
   /** Register an optimized comparator for a {@link WritableComparable}
* implementation. Comparators registered with this method must be
* thread-safe. */
-  public static synchronized void define(Class c,
- WritableComparator comparator) {
+  public static void define(Class c, WritableComparator comparator) {
 comparators.put(c, comparator);
   }
 
-
   private final Class keyClass;
   private final WritableComparable key1;
   private final WritableComparable key2;




svn commit: r1390623 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java src/test/java/org/apache/hadoo

2012-09-26 Thread jlowe
Author: jlowe
Date: Wed Sep 26 17:38:55 2012
New Revision: 1390623

URL: http://svn.apache.org/viewvc?rev=1390623&view=rev
Log:
svn merge -c 1390616 to fix HADOOP-8843. Old trash directories are never 
deleted on upgrade from 1.x.  Contributed by Jason Lowe

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390623&r1=1390622&r2=1390623&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Wed Sep 26 17:38:55 2012
@@ -14,6 +14,9 @@ Release 0.23.4 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-8843. Old trash directories are never deleted on upgrade
+from 1.x (jlowe)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1390623&r1=1390622&r2=1390623&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Wed Sep 26 17:38:55 2012
@@ -61,6 +61,9 @@ public class TrashPolicyDefault extends 
 new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
 
   private static final DateFormat CHECKPOINT = new 
SimpleDateFormat("yyMMddHHmmss");
+  /** Format of checkpoint directories used prior to Hadoop 0.23. */
+  private static final DateFormat OLD_CHECKPOINT =
+  new SimpleDateFormat("yyMMddHHmm");
   private static final int MSECS_PER_MINUTE = 60*1000;
 
   private Path current;
@@ -197,9 +200,7 @@ public class TrashPolicyDefault extends 
 
   long time;
   try {
-synchronized (CHECKPOINT) {
-  time = CHECKPOINT.parse(name).getTime();
-}
+time = getTimeFromCheckpoint(name);
   } catch (ParseException e) {
 LOG.warn("Unexpected item in trash: "+dir+". Ignoring.");
 continue;
@@ -300,4 +301,22 @@ public class TrashPolicyDefault extends 
   return (time / interval) * interval;
 }
   }
+
+  private long getTimeFromCheckpoint(String name) throws ParseException {
+long time;
+
+try {
+  synchronized (CHECKPOINT) {
+time = CHECKPOINT.parse(name).getTime();
+  }
+} catch (ParseException pe) {
+  // Check for old-style checkpoint directories left over
+  // after an upgrade from Hadoop 1.x
+  synchronized (OLD_CHECKPOINT) {
+time = OLD_CHECKPOINT.parse(name).getTime();
+  }
+}
+
+return time;
+  }
 }

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1390623&r1=1390622&r2=1390623&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
 Wed Sep 26 17:38:55 2012
@@ -26,6 +26,8 @@ import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URI;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -431,6 +433,36 @@ public class TestTrash extends TestCase 
 output.indexOf(("Consider using -skipTrash option")) != -1 );
 }
 
+// Verify old checkpoint format is recognized
+{
+  // emulate two old trash checkpoint directories, one that is old enough
+  // to be deleted on the next expunge and one that isn't.
+  long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY,
+  FS_TRASH_INTERVAL_DEFAULT);
+  long now = System.currentTimeMillis();
+  DateFormat oldCheckpoint

svn commit: r1390621 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java src/test/java/org/apache/hadoop/f

2012-09-26 Thread jlowe
Author: jlowe
Date: Wed Sep 26 17:23:34 2012
New Revision: 1390621

URL: http://svn.apache.org/viewvc?rev=1390621&view=rev
Log:
svn merge -c 1390616 to fix HADOOP-8843. Old trash directories are never 
deleted on upgrade from 1.x.  Contributed by Jason Lowe

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390621&r1=1390620&r2=1390621&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Sep 26 17:23:34 2012
@@ -747,6 +747,9 @@ Release 0.23.4 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-8843. Old trash directories are never deleted on upgrade
+from 1.x (jlowe)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1390621&r1=1390620&r2=1390621&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Wed Sep 26 17:23:34 2012
@@ -61,6 +61,9 @@ public class TrashPolicyDefault extends 
 new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
 
   private static final DateFormat CHECKPOINT = new 
SimpleDateFormat("yyMMddHHmmss");
+  /** Format of checkpoint directories used prior to Hadoop 0.23. */
+  private static final DateFormat OLD_CHECKPOINT =
+  new SimpleDateFormat("yyMMddHHmm");
   private static final int MSECS_PER_MINUTE = 60*1000;
 
   private Path current;
@@ -202,9 +205,7 @@ public class TrashPolicyDefault extends 
 
   long time;
   try {
-synchronized (CHECKPOINT) {
-  time = CHECKPOINT.parse(name).getTime();
-}
+time = getTimeFromCheckpoint(name);
   } catch (ParseException e) {
 LOG.warn("Unexpected item in trash: "+dir+". Ignoring.");
 continue;
@@ -304,4 +305,22 @@ public class TrashPolicyDefault extends 
   return (time / interval) * interval;
 }
   }
+
+  private long getTimeFromCheckpoint(String name) throws ParseException {
+long time;
+
+try {
+  synchronized (CHECKPOINT) {
+time = CHECKPOINT.parse(name).getTime();
+  }
+} catch (ParseException pe) {
+  // Check for old-style checkpoint directories left over
+  // after an upgrade from Hadoop 1.x
+  synchronized (OLD_CHECKPOINT) {
+time = OLD_CHECKPOINT.parse(name).getTime();
+  }
+}
+
+return time;
+  }
 }

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1390621&r1=1390620&r2=1390621&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
 Wed Sep 26 17:23:34 2012
@@ -26,6 +26,8 @@ import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URI;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -434,6 +436,36 @@ public class TestTrash extends TestCase 
   output.indexOf("Failed to determine server trash configuration") != 
-1);
 }
 
+// Verify old checkpoint format is recognized
+{
+  // emulate two old trash checkpoint directories, one that is old enough
+  // to be deleted on the next expunge and one that isn't.
+  long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY,
+  FS_TRASH_INTERVAL_DEFAULT);
+  long now = Time.now();
+  DateFormat oldCheckpointFormat = new SimpleDateFormat("yyMMddHHmm");

svn commit: r1390616 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java src/test/java/org/apache/hadoop/fs/TestTrash.

2012-09-26 Thread jlowe
Author: jlowe
Date: Wed Sep 26 17:14:27 2012
New Revision: 1390616

URL: http://svn.apache.org/viewvc?rev=1390616&view=rev
Log:
HADOOP-8843. Old trash directories are never deleted on upgrade from 1.x.  
Contributed by Jason Lowe

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390616&r1=1390615&r2=1390616&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Sep 
26 17:14:27 2012
@@ -995,6 +995,9 @@ Release 0.23.4 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-8843. Old trash directories are never deleted on upgrade
+from 1.x (jlowe)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1390616&r1=1390615&r2=1390616&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Wed Sep 26 17:14:27 2012
@@ -61,6 +61,9 @@ public class TrashPolicyDefault extends 
 new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
 
   private static final DateFormat CHECKPOINT = new 
SimpleDateFormat("yyMMddHHmmss");
+  /** Format of checkpoint directories used prior to Hadoop 0.23. */
+  private static final DateFormat OLD_CHECKPOINT =
+  new SimpleDateFormat("yyMMddHHmm");
   private static final int MSECS_PER_MINUTE = 60*1000;
 
   private Path current;
@@ -202,9 +205,7 @@ public class TrashPolicyDefault extends 
 
   long time;
   try {
-synchronized (CHECKPOINT) {
-  time = CHECKPOINT.parse(name).getTime();
-}
+time = getTimeFromCheckpoint(name);
   } catch (ParseException e) {
 LOG.warn("Unexpected item in trash: "+dir+". Ignoring.");
 continue;
@@ -304,4 +305,22 @@ public class TrashPolicyDefault extends 
   return (time / interval) * interval;
 }
   }
+
+  private long getTimeFromCheckpoint(String name) throws ParseException {
+long time;
+
+try {
+  synchronized (CHECKPOINT) {
+time = CHECKPOINT.parse(name).getTime();
+  }
+} catch (ParseException pe) {
+  // Check for old-style checkpoint directories left over
+  // after an upgrade from Hadoop 1.x
+  synchronized (OLD_CHECKPOINT) {
+time = OLD_CHECKPOINT.parse(name).getTime();
+  }
+}
+
+return time;
+  }
 }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1390616&r1=1390615&r2=1390616&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
 Wed Sep 26 17:14:27 2012
@@ -26,6 +26,8 @@ import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URI;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -434,6 +436,36 @@ public class TestTrash extends TestCase 
   output.indexOf("Failed to determine server trash configuration") != 
-1);
 }
 
+// Verify old checkpoint format is recognized
+{
+  // emulate two old trash checkpoint directories, one that is old enough
+  // to be deleted on the next expunge and one that isn't.
+  long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY,
+  FS_TRASH_INTERVAL_DEFAULT);
+  long now = Time.now();
+  DateFormat oldCheckpointFormat = new SimpleDateFormat("yyMMddHHmm");
+  Path dirToDelete = new Path(trashRoot.getParent(),
+  oldCheckpointFormat.format(now - (trashInterval * 60 * 1000) - 1));
+  Path dirToKeep = new Path(trashRoot.getParent(),
+  oldCheck

svn commit: r1390479 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/who.xml publish/who.html publish/who.pdf

2012-09-26 Thread tgraves
Author: tgraves
Date: Wed Sep 26 13:40:34 2012
New Revision: 1390479

URL: http://svn.apache.org/viewvc?rev=1390479&view=rev
Log:
add tgraves to PMC list

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
hadoop/common/site/main/publish/who.html
hadoop/common/site/main/publish/who.pdf

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1390479&r1=1390478&r2=1390479&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Wed 
Sep 26 13:40:34 2012
@@ -257,6 +257,14 @@
 
 
 
+  tgraves
+  Thomas Graves
+  Yahoo!
+  
+  -6
+
+
+
   todd
   http://people.apache.org/~todd";>Todd Lipcon
   Cloudera

Modified: hadoop/common/site/main/publish/who.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.html?rev=1390479&r1=1390478&r2=1390479&view=diff
==
--- hadoop/common/site/main/publish/who.html (original)
+++ hadoop/common/site/main/publish/who.html Wed Sep 26 13:40:34 2012
@@ -572,6 +572,17 @@ document.write("Last Published: " + docu
 
 
   
+tgraves
+  Thomas Graves
+  Yahoo!
+  
+  -6
+
+
+
+
+
+  
 todd
   http://people.apache.org/~todd";>Todd Lipcon
   Cloudera
@@ -640,7 +651,7 @@ document.write("Last Published: " + docu
 
 
 
-
+
 Emeritus Hadoop PMC Members
 
 
@@ -655,7 +666,7 @@ document.write("Last Published: " + docu
 
 

-
+
 Hadoop Committers
 
 Hadoop's active committers include:
@@ -1270,7 +1281,7 @@ document.write("Last Published: " + docu
 
 

-
+
 Emeritus Hadoop Committers
 
 Hadoop committers who are no longer active include:

Modified: hadoop/common/site/main/publish/who.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.pdf?rev=1390479&r1=1390478&r2=1390479&view=diff
==
Binary files - no diff available.