hadoop git commit: [HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via daryn)

2017-09-26 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-3.0 bf03363d8 -> 61dc4bcd6


[HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via 
daryn)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/61dc4bcd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/61dc4bcd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/61dc4bcd

Branch: refs/heads/branch-3.0
Commit: 61dc4bcd617324335eea689a6d811f45feb77f8e
Parents: bf03363
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Tue Sep 26 15:27:32 2017 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Tue Sep 26 15:27:32 2017 -0500

--
 .../apache/hadoop/hdfs/web/JsonUtilClient.java  |  38 ++
 .../hadoop/hdfs/web/WebHdfsFileSystem.java  |  17 +++
 .../hadoop/hdfs/web/resources/GetOpParam.java   |   3 +-
 .../hdfs/server/namenode/FSNamesystem.java  |   3 +-
 .../web/resources/NamenodeWebHdfsMethods.java   |  21 +++
 .../org/apache/hadoop/hdfs/web/JsonUtil.java|  20 +++
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 134 +++
 7 files changed, 234 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/61dc4bcd/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index dcd73bf..53d886d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.ContentSummary.Builder;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.MD5MD5CRC32CastagnoliFileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32GzipFileChecksum;
@@ -65,6 +66,8 @@ import java.util.Map;
 
 class JsonUtilClient {
   static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
+  static final String UNSUPPPORTED_EXCEPTION_STR =
+      UnsupportedOperationException.class.getName();
 
   /** Convert a Json map to a RemoteException. */
   static RemoteException toRemoteException(final Map<?, ?> json) {
@@ -72,6 +75,9 @@ class JsonUtilClient {
         RemoteException.class.getSimpleName());
     final String message = (String)m.get("message");
     final String javaClassName = (String)m.get("javaClassName");
+    if (UNSUPPPORTED_EXCEPTION_STR.equals(javaClassName)) {
+      throw new UnsupportedOperationException(message);
+    }
     return new RemoteException(javaClassName, message);
   }
 
@@ -644,4 +650,36 @@ class JsonUtilClient {
     }
   }
 
+  /*
+   * The parameters which have default value -1 are required fields according
+   * to hdfs.proto.
+   * The default values for optional fields are taken from
+   * hdfs.proto#FsServerDefaultsProto.
+   */
+  public static FsServerDefaults toFsServerDefaults(final Map<?, ?> json) {
+    if (json == null) {
+      return null;
+    }
+    Map<?, ?> m =
+        (Map<?, ?>) json.get(FsServerDefaults.class.getSimpleName());
+    long blockSize = getLong(m, "blockSize", -1);
+    int bytesPerChecksum = getInt(m, "bytesPerChecksum", -1);
+    int writePacketSize = getInt(m, "writePacketSize", -1);
+    short replication = (short) getInt(m, "replication", -1);
+    int fileBufferSize = getInt(m, "fileBufferSize", -1);
+    boolean encryptDataTransfer = m.containsKey("encryptDataTransfer")
+        ? (Boolean) m.get("encryptDataTransfer")
+        : false;
+    long trashInterval = getLong(m, "trashInterval", 0);
+    DataChecksum.Type type =
+        DataChecksum.Type.valueOf(getInt(m, "checksumType", 1));
+    String keyProviderUri = (String) m.get("keyProviderUri");
+    byte storagepolicyId = m.containsKey("defaultStoragePolicyId")
+        ? ((Number) m.get("defaultStoragePolicyId")).byteValue()
+        : HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED;
+    return new FsServerDefaults(blockSize, bytesPerChecksum,
+        writePacketSize, replication, fileBufferSize,
+        encryptDataTransfer, trashInterval, type, keyProviderUri,
+        storagepolicyId);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/61dc4bcd/hadoop-hdfs-project/hadoop-hdfs-client/src/m
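
A note on the wire format, inferred from the parser above rather than quoted
from the commit: toFsServerDefaults() expects the response body to carry a
single object keyed by the class's simple name, with optional fields simply
absent in responses from older servers. A minimal sketch, with invented
values:

    import java.util.Map;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Field names mirror the getLong()/getInt() keys in the parser above;
    // the values are made up for illustration.
    String body = "{\"FsServerDefaults\":{"
        + "\"blockSize\":134217728,\"bytesPerChecksum\":512,"
        + "\"writePacketSize\":65536,\"replication\":3,"
        + "\"fileBufferSize\":4096,\"checksumType\":2}}";
    Map<?, ?> json = new ObjectMapper().readValue(body, Map.class);
    // JsonUtilClient.toFsServerDefaults(json) would turn this map into an
    // FsServerDefaults; the class is package-private, so the call is shown
    // only conceptually.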

hadoop git commit: [HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via daryn)

2017-09-26 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 86e092029 -> 438291787


[HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via 
daryn)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43829178
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43829178
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43829178

Branch: refs/heads/branch-2.8
Commit: 438291787a396be260dba0be990d5194b7de1fae
Parents: 86e09202
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Tue Sep 26 15:25:12 2017 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Tue Sep 26 15:25:12 2017 -0500

--
 .../apache/hadoop/hdfs/web/JsonUtilClient.java  |  39 ++
 .../hadoop/hdfs/web/WebHdfsFileSystem.java  |  17 +++
 .../hadoop/hdfs/web/resources/GetOpParam.java   |   4 +-
 .../hdfs/server/namenode/FSNamesystem.java  |   3 +-
 .../web/resources/NamenodeWebHdfsMethods.java   |  21 +++
 .../org/apache/hadoop/hdfs/web/JsonUtil.java|  19 +++
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 134 +++
 7 files changed, 235 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43829178/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index b1c270b..bfc770e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.ContentSummary.Builder;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.MD5MD5CRC32CastagnoliFileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32GzipFileChecksum;
@@ -63,6 +64,8 @@ import java.util.Map;
 
 class JsonUtilClient {
   static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
+  static final String UNSUPPPORTED_EXCEPTION_STR =
+      UnsupportedOperationException.class.getName();
 
   /** Convert a Json map to a RemoteException. */
   static RemoteException toRemoteException(final Map<?, ?> json) {
@@ -70,6 +73,9 @@ class JsonUtilClient {
         RemoteException.class.getSimpleName());
     final String message = (String)m.get("message");
     final String javaClassName = (String)m.get("javaClassName");
+    if (UNSUPPPORTED_EXCEPTION_STR.equals(javaClassName)) {
+      throw new UnsupportedOperationException(message);
+    }
     return new RemoteException(javaClassName, message);
   }
 
@@ -594,4 +600,37 @@ class JsonUtilClient {
       return storageTypes;
     }
   }
+
+  /*
+   * The parameters which have default value -1 are required fields according
+   * to hdfs.proto.
+   * The default values for optional fields are taken from
+   * hdfs.proto#FsServerDefaultsProto.
+   */
+  public static FsServerDefaults toFsServerDefaults(final Map<?, ?> json) {
+    if (json == null) {
+      return null;
+    }
+    Map<?, ?> m =
+        (Map<?, ?>) json.get(FsServerDefaults.class.getSimpleName());
+    long blockSize = getLong(m, "blockSize", -1);
+    int bytesPerChecksum = getInt(m, "bytesPerChecksum", -1);
+    int writePacketSize = getInt(m, "writePacketSize", -1);
+    short replication = (short) getInt(m, "replication", -1);
+    int fileBufferSize = getInt(m, "fileBufferSize", -1);
+    boolean encryptDataTransfer = m.containsKey("encryptDataTransfer")
+        ? (Boolean) m.get("encryptDataTransfer")
+        : false;
+    long trashInterval = getLong(m, "trashInterval", 0);
+    DataChecksum.Type type =
+        DataChecksum.Type.valueOf(getInt(m, "checksumType", 1));
+    String keyProviderUri = (String) m.get("keyProviderUri");
+    byte storagepolicyId = m.containsKey("defaultStoragePolicyId")
+        ? ((Number) m.get("defaultStoragePolicyId")).byteValue()
+        : HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED;
+    return new FsServerDefaults(blockSize, bytesPerChecksum,
+        writePacketSize, replication, fileBufferSize,
+        encryptDataTransfer, trashInterval, type, keyProviderUri,
+        storagepolicyId);
+  }
 }

http://
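
As a usage sketch (not part of the commit): a client can now ask a WebHDFS
filesystem for the server's defaults, and the UnsupportedOperationException
rethrow above gives it a clean fallback against NameNodes that predate this
patch. The URI is a placeholder, and the fragment belongs inside a method
that declares IOException:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsServerDefaults;
    import org.apache.hadoop.fs.Path;

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(
        URI.create("webhdfs://nn.example.com:50070/"), conf); // placeholder
    FsServerDefaults defaults;
    try {
      defaults = fs.getServerDefaults(new Path("/"));
    } catch (UnsupportedOperationException e) {
      defaults = null; // older NameNode: fall back to client-side config
    }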

hadoop git commit: [HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via daryn)

2017-09-26 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 27a41848a -> 7f3ed5f61


[HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via 
daryn)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7f3ed5f6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7f3ed5f6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7f3ed5f6

Branch: refs/heads/branch-2
Commit: 7f3ed5f61ed3640c50df52f699b992a889b8b28b
Parents: 27a4184
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Tue Sep 26 15:21:37 2017 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Tue Sep 26 15:21:37 2017 -0500

--
 .../apache/hadoop/hdfs/web/JsonUtilClient.java  |  39 ++
 .../hadoop/hdfs/web/WebHdfsFileSystem.java  |  17 +++
 .../hadoop/hdfs/web/resources/GetOpParam.java   |   3 +-
 .../hdfs/server/namenode/FSNamesystem.java  |   3 +-
 .../web/resources/NamenodeWebHdfsMethods.java   |  21 +++
 .../org/apache/hadoop/hdfs/web/JsonUtil.java|  19 +++
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 134 +++
 7 files changed, 234 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7f3ed5f6/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index 43bb17f..0320614 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.ContentSummary.Builder;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.MD5MD5CRC32CastagnoliFileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32GzipFileChecksum;
@@ -66,6 +67,8 @@ import java.util.Map;
 
 class JsonUtilClient {
   static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
+  static final String UNSUPPPORTED_EXCEPTION_STR =
+      UnsupportedOperationException.class.getName();
 
   /** Convert a Json map to a RemoteException. */
   static RemoteException toRemoteException(final Map<?, ?> json) {
@@ -73,6 +76,9 @@ class JsonUtilClient {
         RemoteException.class.getSimpleName());
     final String message = (String)m.get("message");
     final String javaClassName = (String)m.get("javaClassName");
+    if (UNSUPPPORTED_EXCEPTION_STR.equals(javaClassName)) {
+      throw new UnsupportedOperationException(message);
+    }
     return new RemoteException(javaClassName, message);
   }
 
@@ -692,4 +698,37 @@ class JsonUtilClient {
       return array;
     }
   }
+
+  /*
+   * The parameters which have default value -1 are required fields according
+   * to hdfs.proto.
+   * The default values for optional fields are taken from
+   * hdfs.proto#FsServerDefaultsProto.
+   */
+  public static FsServerDefaults toFsServerDefaults(final Map<?, ?> json) {
+    if (json == null) {
+      return null;
+    }
+    Map<?, ?> m =
+        (Map<?, ?>) json.get(FsServerDefaults.class.getSimpleName());
+    long blockSize = getLong(m, "blockSize", -1);
+    int bytesPerChecksum = getInt(m, "bytesPerChecksum", -1);
+    int writePacketSize = getInt(m, "writePacketSize", -1);
+    short replication = (short) getInt(m, "replication", -1);
+    int fileBufferSize = getInt(m, "fileBufferSize", -1);
+    boolean encryptDataTransfer = m.containsKey("encryptDataTransfer")
+        ? (Boolean) m.get("encryptDataTransfer")
+        : false;
+    long trashInterval = getLong(m, "trashInterval", 0);
+    DataChecksum.Type type =
+        DataChecksum.Type.valueOf(getInt(m, "checksumType", 1));
+    String keyProviderUri = (String) m.get("keyProviderUri");
+    byte storagepolicyId = m.containsKey("defaultStoragePolicyId")
+        ? ((Number) m.get("defaultStoragePolicyId")).byteValue()
+        : HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED;
+    return new FsServerDefaults(blockSize, bytesPerChecksum,
+        writePacketSize, replication, fileBufferSize,
+        encryptDataTransfer, trashInterval, type, keyProviderUri,
+        storagepolicyId);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7f3ed5f6/hadoop-hdfs-project/had
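
A small sketch of what the toRemoteException() change above does, with an
invented error payload: when the server-side JSON names
UnsupportedOperationException, the client rethrows it directly instead of
wrapping it in a RemoteException.

    import java.util.HashMap;
    import java.util.Map;

    Map<String, Object> error = new HashMap<>();
    error.put("javaClassName", "java.lang.UnsupportedOperationException");
    error.put("message", "server does not implement this op"); // invented
    Map<String, Object> json = new HashMap<>();
    json.put("RemoteException", error);
    // JsonUtilClient.toRemoteException(json) now throws
    // UnsupportedOperationException("server does not implement this op")
    // instead of returning a RemoteException wrapper.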

hadoop git commit: [HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via daryn)

2017-09-26 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9df05005a -> 0da29cbee


[HDFS-12386] Add fsserver defaults call to WebhdfsFileSystem. (Rushabh Shah via 
daryn)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0da29cbe
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0da29cbe
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0da29cbe

Branch: refs/heads/trunk
Commit: 0da29cbeea40cb7839abcd72566b997962829329
Parents: 9df0500
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Tue Sep 26 15:15:24 2017 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Tue Sep 26 15:15:24 2017 -0500

--
 .../apache/hadoop/hdfs/web/JsonUtilClient.java  |  38 ++
 .../hadoop/hdfs/web/WebHdfsFileSystem.java  |  17 +++
 .../hadoop/hdfs/web/resources/GetOpParam.java   |   3 +-
 .../hdfs/server/namenode/FSNamesystem.java  |   3 +-
 .../web/resources/NamenodeWebHdfsMethods.java   |  21 +++
 .../org/apache/hadoop/hdfs/web/JsonUtil.java|  20 +++
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 134 +++
 7 files changed, 234 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0da29cbe/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index dcd73bf..53d886d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.ContentSummary.Builder;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.MD5MD5CRC32CastagnoliFileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32GzipFileChecksum;
@@ -65,6 +66,8 @@ import java.util.Map;
 
 class JsonUtilClient {
   static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
+  static final String UNSUPPPORTED_EXCEPTION_STR =
+      UnsupportedOperationException.class.getName();
 
   /** Convert a Json map to a RemoteException. */
   static RemoteException toRemoteException(final Map<?, ?> json) {
@@ -72,6 +75,9 @@ class JsonUtilClient {
         RemoteException.class.getSimpleName());
     final String message = (String)m.get("message");
     final String javaClassName = (String)m.get("javaClassName");
+    if (UNSUPPPORTED_EXCEPTION_STR.equals(javaClassName)) {
+      throw new UnsupportedOperationException(message);
+    }
     return new RemoteException(javaClassName, message);
   }
 
@@ -644,4 +650,36 @@ class JsonUtilClient {
     }
   }
 
+  /*
+   * The parameters which have default value -1 are required fields according
+   * to hdfs.proto.
+   * The default values for optional fields are taken from
+   * hdfs.proto#FsServerDefaultsProto.
+   */
+  public static FsServerDefaults toFsServerDefaults(final Map<?, ?> json) {
+    if (json == null) {
+      return null;
+    }
+    Map<?, ?> m =
+        (Map<?, ?>) json.get(FsServerDefaults.class.getSimpleName());
+    long blockSize = getLong(m, "blockSize", -1);
+    int bytesPerChecksum = getInt(m, "bytesPerChecksum", -1);
+    int writePacketSize = getInt(m, "writePacketSize", -1);
+    short replication = (short) getInt(m, "replication", -1);
+    int fileBufferSize = getInt(m, "fileBufferSize", -1);
+    boolean encryptDataTransfer = m.containsKey("encryptDataTransfer")
+        ? (Boolean) m.get("encryptDataTransfer")
+        : false;
+    long trashInterval = getLong(m, "trashInterval", 0);
+    DataChecksum.Type type =
+        DataChecksum.Type.valueOf(getInt(m, "checksumType", 1));
+    String keyProviderUri = (String) m.get("keyProviderUri");
+    byte storagepolicyId = m.containsKey("defaultStoragePolicyId")
+        ? ((Number) m.get("defaultStoragePolicyId")).byteValue()
+        : HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED;
+    return new FsServerDefaults(blockSize, bytesPerChecksum,
+        writePacketSize, replication, fileBufferSize,
+        encryptDataTransfer, trashInterval, type, keyProviderUri,
+        storagepolicyId);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0da29cbe/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java
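
One detail of the parser worth spelling out: checksumType travels as the
numeric id of DataChecksum.Type, and the default of 1 maps to CRC32 in that
enum. This is a property of the existing enum, not something the commit
itself states:

    import org.apache.hadoop.util.DataChecksum;

    // DataChecksum.Type.valueOf(int) maps a wire id back to the enum.
    DataChecksum.Type dflt = DataChecksum.Type.valueOf(1);   // CRC32
    DataChecksum.Type crc32c = DataChecksum.Type.valueOf(2); // CRC32C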

hadoop git commit: HADOOP-14146. KerberosAuthenticationHandler should authenticate with SPN in AP-REQ. Contributed by Daryn Sharp

2017-06-21 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 ba487ead4 -> d4115d71b


HADOOP-14146.  KerberosAuthenticationHandler should authenticate with SPN in 
AP-REQ.  Contributed by Daryn Sharp


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d4115d71
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d4115d71
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d4115d71

Branch: refs/heads/branch-2
Commit: d4115d71b5e776c05b3da59b8ad29ad0bb6b8a2c
Parents: ba487ea
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Wed Jun 21 11:03:41 2017 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Wed Jun 21 11:05:12 2017 -0500

--
 .../client/KerberosAuthenticator.java   |   4 +-
 .../server/KerberosAuthenticationHandler.java   | 204 +++
 .../authentication/util/KerberosUtil.java   | 198 +-
 .../TestMultiSchemeAuthenticationHandler.java   |   2 +-
 .../authentication/util/TestKerberosUtil.java   |  73 +++
 5 files changed, 301 insertions(+), 180 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4115d71/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index ceec927..9bcebc3 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -293,10 +293,10 @@ public class KerberosAuthenticator implements Authenticator {
             GSSManager gssManager = GSSManager.getInstance();
             String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
                 KerberosAuthenticator.this.url.getHost());
-            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
+            Oid oid = KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID;
             GSSName serviceName = gssManager.createName(servicePrincipal,
                 oid);
-            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
+            oid = KerberosUtil.GSS_KRB5_MECH_OID;
             gssContext = gssManager.createContext(serviceName, oid, null,
                 GSSContext.DEFAULT_LIFETIME);
             gssContext.requestCredDeleg(true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4115d71/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
index e0ee227..887548b 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
@@ -28,31 +28,20 @@ import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
+import javax.security.auth.kerberos.KeyTab;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
+import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import com.google.common.collect.HashMultimap;
-
-import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
-
 /**
  * The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO
  * authe
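
For readers skimming the hunk above, a condensed sketch of the GSS handshake
it belongs to, using the new KerberosUtil constants. This is a simplification
of KerberosAuthenticator with a placeholder host, not the class's full logic,
and the GSS calls throw GSSException:

    import org.apache.hadoop.security.authentication.util.KerberosUtil;
    import org.ietf.jgss.GSSContext;
    import org.ietf.jgss.GSSManager;
    import org.ietf.jgss.GSSName;

    GSSManager gssManager = GSSManager.getInstance();
    String servicePrincipal =
        KerberosUtil.getServicePrincipal("HTTP", "nn.example.com");
    GSSName serviceName = gssManager.createName(servicePrincipal,
        KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID);
    GSSContext ctx = gssManager.createContext(serviceName,
        KerberosUtil.GSS_KRB5_MECH_OID, null, GSSContext.DEFAULT_LIFETIME);
    ctx.requestCredDeleg(true);
    byte[] token = ctx.initSecContext(new byte[0], 0, 0); // first SPNEGO token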

hadoop git commit: HADOOP-14146. KerberosAuthenticationHandler should authenticate with SPN in AP-REQ. Contributed by Daryn Sharp

2017-06-21 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/trunk 5db3f9846 -> e806c6e0c


HADOOP-14146.  KerberosAuthenticationHandler should authenticate with SPN in 
AP-REQ.  Contributed by Daryn Sharp


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e806c6e0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e806c6e0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e806c6e0

Branch: refs/heads/trunk
Commit: e806c6e0ce6026d53227b51d58ec6d5458164571
Parents: 5db3f98
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Wed Jun 21 11:03:41 2017 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Wed Jun 21 11:03:41 2017 -0500

--
 .../client/KerberosAuthenticator.java   |   4 +-
 .../server/KerberosAuthenticationHandler.java   | 204 +++
 .../authentication/util/KerberosUtil.java   | 198 +-
 .../TestMultiSchemeAuthenticationHandler.java   |   2 +-
 .../authentication/util/TestKerberosUtil.java   |  73 +++
 5 files changed, 301 insertions(+), 180 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e806c6e0/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index ceec927..9bcebc3 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -293,10 +293,10 @@ public class KerberosAuthenticator implements Authenticator {
            GSSManager gssManager = GSSManager.getInstance();
             String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
                 KerberosAuthenticator.this.url.getHost());
-            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
+            Oid oid = KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID;
             GSSName serviceName = gssManager.createName(servicePrincipal,
                 oid);
-            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
+            oid = KerberosUtil.GSS_KRB5_MECH_OID;
             gssContext = gssManager.createContext(serviceName, oid, null,
                 GSSContext.DEFAULT_LIFETIME);
             gssContext.requestCredDeleg(true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e806c6e0/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
index e0ee227..887548b 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
@@ -28,31 +28,20 @@ import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
+import javax.security.auth.kerberos.KeyTab;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
+import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import com.google.common.collect.HashMultimap;
-
-import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
-
 /**
  * The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO
  * authe
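
The apparent motivation for swapping getOidInstance("...") for constants,
judging from this diff alone (note the removed PlatformName.IBM_JAVA import
in the handler), is that the old helper resolved OIDs reflectively to cope
with JDK differences; precomputed constants avoid that lookup and its checked
exceptions at every call site. A hedged before/after sketch:

    import org.ietf.jgss.Oid;
    import org.apache.hadoop.security.authentication.util.KerberosUtil;

    // Before: Oid oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
    //         (string-keyed reflective lookup that could throw)
    // After: plain constants initialized once in KerberosUtil.
    Oid krb5 = KerberosUtil.GSS_KRB5_MECH_OID;
    Oid name = KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID;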

hadoop git commit: HADOOP-14146. KerberosAuthenticationHandler should authenticate with SPN in AP-REQ. Contributed by Daryn Sharp

2017-06-21 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 a560104fa -> 81eb06b55


HADOOP-14146.  KerberosAuthenticationHandler should authenticate with SPN in 
AP-REQ.  Contributed by Daryn Sharp


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/81eb06b5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/81eb06b5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/81eb06b5

Branch: refs/heads/branch-2.8
Commit: 81eb06b553cba5e80c469af2bcbbe2d95ad3d7f3
Parents: a560104
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Wed Jun 21 11:03:41 2017 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Wed Jun 21 11:48:05 2017 -0500

--
 .../client/KerberosAuthenticator.java   |   4 +-
 .../server/KerberosAuthenticationHandler.java   | 204 +++
 .../authentication/util/KerberosUtil.java   | 198 +-
 .../TestMultiSchemeAuthenticationHandler.java   |   2 +-
 .../authentication/util/TestKerberosUtil.java   |  73 +++
 5 files changed, 301 insertions(+), 180 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/81eb06b5/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index ceec927..9bcebc3 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -293,10 +293,10 @@ public class KerberosAuthenticator implements Authenticator {
             GSSManager gssManager = GSSManager.getInstance();
             String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
                 KerberosAuthenticator.this.url.getHost());
-            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
+            Oid oid = KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID;
             GSSName serviceName = gssManager.createName(servicePrincipal,
                 oid);
-            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
+            oid = KerberosUtil.GSS_KRB5_MECH_OID;
             gssContext = gssManager.createContext(serviceName, oid, null,
                 GSSContext.DEFAULT_LIFETIME);
             gssContext.requestCredDeleg(true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/81eb06b5/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
index e0ee227..887548b 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
@@ -28,31 +28,20 @@ import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
+import javax.security.auth.kerberos.KeyTab;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
+import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import com.google.common.collect.HashMultimap;
-
-import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
-
 /**
  * The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO
  *
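
The new javax.security.auth.kerberos.KeyTab import in the handler suggests
the server credentials now come from a keytab bound to the Subject rather
than a hand-built JAAS LoginContext. A minimal sketch of that JDK API, with
placeholder principal and path; the handler's actual wiring is in the full
diff, not shown here:

    import java.io.File;
    import javax.security.auth.Subject;
    import javax.security.auth.kerberos.KerberosPrincipal;
    import javax.security.auth.kerberos.KeyTab;

    KerberosPrincipal principal =
        new KerberosPrincipal("HTTP/nn.example.com@EXAMPLE.COM"); // placeholder
    KeyTab keytab =
        KeyTab.getInstance(principal, new File("/etc/security/http.keytab"));
    Subject serverSubject = new Subject();
    serverSubject.getPrincipals().add(principal);
    serverSubject.getPrivateCredentials().add(keytab);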

hadoop git commit: HDFS-10762. Pass IIP for file status related methods

2016-08-24 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 2321b7303 -> a30f6a68f


HDFS-10762. Pass IIP for file status related methods

(cherry picked from commit ec252ce0fc0998ce13f31af3440c08a236328e5a)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a30f6a68
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a30f6a68
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a30f6a68

Branch: refs/heads/branch-2
Commit: a30f6a68fabf80d2db5868bcc031266986d93b03
Parents: 2321b73
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Wed Aug 24 08:51:05 2016 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Wed Aug 24 08:51:05 2016 -0500

--
 .../hdfs/server/namenode/FSDirAppendOp.java |  6 +--
 .../server/namenode/FSDirStatAndListingOp.java  | 56 +---
 .../hdfs/server/namenode/FSDirWriteFileOp.java  |  3 +-
 .../hdfs/server/namenode/FSDirectory.java   | 14 ++---
 .../hdfs/server/namenode/INodesInPath.java  | 42 ---
 .../hadoop/hdfs/TestReservedRawPaths.java   | 21 
 6 files changed, 91 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a30f6a68/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
index f0cbb30..07b2a25 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
@@ -85,9 +85,10 @@ final class FSDirAppendOp {
     final LocatedBlock lb;
     final FSDirectory fsd = fsn.getFSDirectory();
     final String src;
+    final INodesInPath iip;
     fsd.writeLock();
     try {
-      final INodesInPath iip = fsd.resolvePathForWrite(pc, srcArg);
+      iip = fsd.resolvePathForWrite(pc, srcArg);
       src = iip.getPath();
       // Verify that the destination does not exist as a directory already
       final INode inode = iip.getLastINode();
@@ -141,8 +142,7 @@ final class FSDirAppendOp {
       fsd.writeUnlock();
     }
 
-    HdfsFileStatus stat = FSDirStatAndListingOp.getFileInfo(fsd, src, false,
-        FSDirectory.isReservedRawName(srcArg));
+    HdfsFileStatus stat = FSDirStatAndListingOp.getFileInfo(fsd, iip);
     if (lb != null) {
       NameNode.stateChangeLog.debug(
           "DIR* NameSystem.appendFile: file {} for {} at {} block {} block"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a30f6a68/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
index a82c779..9b0e5f7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
@@ -106,16 +106,16 @@ class FSDirStatAndListingOp {
     if (!DFSUtil.isValidName(src)) {
       throw new InvalidPathException("Invalid file name: " + src);
     }
+    final INodesInPath iip;
     if (fsd.isPermissionEnabled()) {
       FSPermissionChecker pc = fsd.getPermissionChecker();
-      final INodesInPath iip = fsd.resolvePath(pc, srcArg, resolveLink);
-      src = iip.getPath();
+      iip = fsd.resolvePath(pc, srcArg, resolveLink);
       fsd.checkPermission(pc, iip, false, null, null, null, null, false);
     } else {
       src = FSDirectory.resolvePath(srcArg, fsd);
+      iip = fsd.getINodesInPath(src, resolveLink);
     }
-    return getFileInfo(fsd, src, FSDirectory.isReservedRawName(srcArg),
-                       resolveLink);
+    return getFileInfo(fsd, iip);
   }
 
   /**
@@ -226,7 +226,6 @@ class FSDirStatAndListingOp {
       String src, byte[] startAfter, boolean needLocation, boolean isSuperUser)
       throws IOException {
     String srcs = FSDirectory.normalizePath(src);
-    final boolean isRawPath = FSDirectory.isReservedRawName(src);
     if (FSDirectory.isExactReservedName(srcs)) {
       return getReservedListing(fsd);
     }
@@ -253,7 +252,7 @@ class FSDirStatAndListingOp {
 return new DirectoryListi
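
Both hunks follow one pattern: resolve the path to an INodesInPath exactly
once, then hand the IIP to downstream helpers instead of passing the string
path and resolving it again. That removes redundant resolution work and the
window for the path to change meaning between lookups. Condensed from the
diff (internal NameNode APIs; locking elided):

    // resolve once, under the namesystem lock...
    final INodesInPath iip = fsd.resolvePath(pc, srcArg, resolveLink);
    fsd.checkPermission(pc, iip, false, null, null, null, null, false);
    // ...then reuse it; the old getFileInfo(fsd, src, isRawPath, resolveLink)
    // re-resolved the same string path internally.
    HdfsFileStatus stat = getFileInfo(fsd, iip);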

hadoop git commit: HDFS-10762. Pass IIP for file status related methods

2016-08-24 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/trunk 092b4d5bf -> ec252ce0f


HDFS-10762. Pass IIP for file status related methods


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ec252ce0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ec252ce0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ec252ce0

Branch: refs/heads/trunk
Commit: ec252ce0fc0998ce13f31af3440c08a236328e5a
Parents: 092b4d5
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Wed Aug 24 08:46:47 2016 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Wed Aug 24 08:46:47 2016 -0500

--
 .../hdfs/server/namenode/FSDirAppendOp.java |  6 +--
 .../server/namenode/FSDirStatAndListingOp.java  | 56 +---
 .../hdfs/server/namenode/FSDirWriteFileOp.java  |  3 +-
 .../hdfs/server/namenode/FSDirectory.java   | 14 ++---
 .../hdfs/server/namenode/INodesInPath.java  | 42 ---
 .../hadoop/hdfs/TestReservedRawPaths.java   | 21 
 6 files changed, 91 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ec252ce0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
index 3a5d7dc..5192352 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java
@@ -85,9 +85,10 @@ final class FSDirAppendOp {
     final LocatedBlock lb;
     final FSDirectory fsd = fsn.getFSDirectory();
     final String src;
+    final INodesInPath iip;
     fsd.writeLock();
     try {
-      final INodesInPath iip = fsd.resolvePathForWrite(pc, srcArg);
+      iip = fsd.resolvePathForWrite(pc, srcArg);
       src = iip.getPath();
       // Verify that the destination does not exist as a directory already
       final INode inode = iip.getLastINode();
@@ -148,8 +149,7 @@ final class FSDirAppendOp {
       fsd.writeUnlock();
     }
 
-    HdfsFileStatus stat = FSDirStatAndListingOp.getFileInfo(fsd, src, false,
-        FSDirectory.isReservedRawName(srcArg));
+    HdfsFileStatus stat = FSDirStatAndListingOp.getFileInfo(fsd, iip);
     if (lb != null) {
       NameNode.stateChangeLog.debug(
           "DIR* NameSystem.appendFile: file {} for {} at {} block {} block"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ec252ce0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
index c9eedf5..8a9393e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
@@ -108,16 +108,16 @@ class FSDirStatAndListingOp {
     if (!DFSUtil.isValidName(src)) {
       throw new InvalidPathException("Invalid file name: " + src);
     }
+    final INodesInPath iip;
     if (fsd.isPermissionEnabled()) {
       FSPermissionChecker pc = fsd.getPermissionChecker();
-      final INodesInPath iip = fsd.resolvePath(pc, srcArg, resolveLink);
-      src = iip.getPath();
+      iip = fsd.resolvePath(pc, srcArg, resolveLink);
       fsd.checkPermission(pc, iip, false, null, null, null, null, false);
     } else {
       src = FSDirectory.resolvePath(srcArg, fsd);
+      iip = fsd.getINodesInPath(src, resolveLink);
     }
-    return getFileInfo(fsd, src, FSDirectory.isReservedRawName(srcArg),
-                       resolveLink);
+    return getFileInfo(fsd, iip);
   }
 
   /**
@@ -230,7 +230,6 @@ class FSDirStatAndListingOp {
       String src, byte[] startAfter, boolean needLocation, boolean isSuperUser)
       throws IOException {
     String srcs = FSDirectory.normalizePath(src);
-    final boolean isRawPath = FSDirectory.isReservedRawName(src);
     if (FSDirectory.isExactReservedName(srcs)) {
       return getReservedListing(fsd);
     }
@@ -257,7 +256,7 @@ class FSDirStatAndListingOp {
     return new DirectoryListing(
         new HdfsFileStatus[]{ createFileStatus(
             fsd, H
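
A small Java idiom in the FSDirAppendOp hunk deserves a note: the added
`final INodesInPath iip;` is a blank final declared before the try block and
assigned exactly once inside it, which keeps the resolved path usable after
the lock is released without making the variable mutable. In miniature
(resolve(), use(), and releaseLock() are hypothetical):

    final String value;   // blank final: declared, not yet assigned
    try {
      value = resolve();  // assigned exactly once
    } finally {
      releaseLock();      // hypothetical cleanup; must not read 'value'
    }
    use(value);           // definitely assigned if resolve() returned normally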

hadoop git commit: HDFS-10655. Fix path related byte array conversion bugs. (daryn)

2016-08-01 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 fa0b6ff97 -> ecb143ed2


HDFS-10655. Fix path related byte array conversion bugs. (daryn)

(cherry picked from commit 9f473cf903e586c556154abd56b3a3d820c6b028)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ecb143ed
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ecb143ed
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ecb143ed

Branch: refs/heads/branch-2.8
Commit: ecb143ed21f4971cc3e3d4827ba8becb1864f171
Parents: fa0b6ff
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Mon Aug 1 10:14:28 2016 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Mon Aug 1 10:18:45 2016 -0500

--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  42 
 .../hdfs/server/namenode/TestFsLimits.java  |   2 +-
 .../server/namenode/TestPathComponents.java | 100 ++-
 3 files changed, 97 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecb143ed/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index da7d26a..7268274 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -276,14 +276,15 @@ public class DFSUtil {
     Preconditions.checkArgument(offset >= 0 && offset < pathComponents.length);
     Preconditions.checkArgument(length >= 0 && offset + length <=
         pathComponents.length);
-    if (pathComponents.length == 1
+    if (offset == 0 && length == 1
         && (pathComponents[0] == null || pathComponents[0].length == 0)) {
       return Path.SEPARATOR;
     }
     StringBuilder result = new StringBuilder();
-    for (int i = offset; i < offset + length; i++) {
+    int lastIndex = offset + length - 1;
+    for (int i = offset; i <= lastIndex; i++) {
       result.append(new String(pathComponents[i], Charsets.UTF_8));
-      if (i < pathComponents.length - 1) {
+      if (i < lastIndex) {
         result.append(Path.SEPARATOR_CHAR);
       }
     }
@@ -349,40 +350,37 @@ public class DFSUtil {
   public static byte[][] bytes2byteArray(byte[] bytes,
                                          int len,
                                          byte separator) {
-    assert len <= bytes.length;
-    int splits = 0;
+    Preconditions.checkPositionIndex(len, bytes.length);
     if (len == 0) {
       return new byte[][]{null};
     }
-    // Count the splits. Omit multiple separators and the last one
-    for (int i = 0; i < len; i++) {
-      if (bytes[i] == separator) {
+    // Count the splits. Omit multiple separators and the last one by
+    // peeking at prior byte.
+    int splits = 0;
+    for (int i = 1; i < len; i++) {
+      if (bytes[i-1] == separator && bytes[i] != separator) {
         splits++;
       }
     }
-    int last = len - 1;
-    while (last > -1 && bytes[last--] == separator) {
-      splits--;
-    }
     if (splits == 0 && bytes[0] == separator) {
       return new byte[][]{null};
     }
     splits++;
     byte[][] result = new byte[splits][];
-    int startIndex = 0;
     int nextIndex = 0;
-    int index = 0;
-    // Build the splits
-    while (index < splits) {
+    // Build the splits.
+    for (int i = 0; i < splits; i++) {
+      int startIndex = nextIndex;
+      // find next separator in the bytes.
       while (nextIndex < len && bytes[nextIndex] != separator) {
         nextIndex++;
       }
-      result[index] = new byte[nextIndex - startIndex];
-      System.arraycopy(bytes, startIndex, result[index], 0, nextIndex
-          - startIndex);
-      index++;
-      startIndex = nextIndex + 1;
-      nextIndex = startIndex;
+      result[i] = (nextIndex > 0)
+          ? Arrays.copyOfRange(bytes, startIndex, nextIndex)
+          : DFSUtilClient.EMPTY_BYTES; // reuse empty bytes for root.
+      do { // skip over separators.
+        nextIndex++;
+      } while (nextIndex < len && bytes[nextIndex] == separator);
     }
     return result;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecb143ed/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/
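
To make the rewritten splitter concrete, here is what it produces for a path
with duplicate and trailing separators, traced from the loop logic above (the
wrapper is illustrative):

    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hdfs.DFSUtil;

    byte[] path = "/a//b/".getBytes(StandardCharsets.UTF_8);
    byte[][] parts = DFSUtil.bytes2byteArray(path, path.length, (byte) '/');
    // parts.length == 3:
    //   parts[0] is the shared empty component for the leading root
    //   parts[1] == "a", parts[2] == "b"
    // Runs of '/' and a trailing '/' are collapsed; "/" alone yields {null}.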

hadoop git commit: HDFS-10655. Fix path related byte array conversion bugs. (daryn)

2016-08-01 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 7ecb1635b -> 4ad2a73df


HDFS-10655. Fix path related byte array conversion bugs. (daryn)

(cherry picked from commit 9f473cf903e586c556154abd56b3a3d820c6b028)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4ad2a73d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4ad2a73d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4ad2a73d

Branch: refs/heads/branch-2
Commit: 4ad2a73df69e483d528f50adb2c7dea71fd7f9c5
Parents: 7ecb163
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Mon Aug 1 10:14:28 2016 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Mon Aug 1 10:17:24 2016 -0500

--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  42 
 .../hdfs/server/namenode/TestFsLimits.java  |   2 +-
 .../server/namenode/TestPathComponents.java | 100 ++-
 3 files changed, 97 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ad2a73d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index da7d26a..7268274 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -276,14 +276,15 @@ public class DFSUtil {
     Preconditions.checkArgument(offset >= 0 && offset < pathComponents.length);
     Preconditions.checkArgument(length >= 0 && offset + length <=
         pathComponents.length);
-    if (pathComponents.length == 1
+    if (offset == 0 && length == 1
         && (pathComponents[0] == null || pathComponents[0].length == 0)) {
       return Path.SEPARATOR;
     }
     StringBuilder result = new StringBuilder();
-    for (int i = offset; i < offset + length; i++) {
+    int lastIndex = offset + length - 1;
+    for (int i = offset; i <= lastIndex; i++) {
       result.append(new String(pathComponents[i], Charsets.UTF_8));
-      if (i < pathComponents.length - 1) {
+      if (i < lastIndex) {
         result.append(Path.SEPARATOR_CHAR);
       }
     }
@@ -349,40 +350,37 @@ public class DFSUtil {
   public static byte[][] bytes2byteArray(byte[] bytes,
                                          int len,
                                          byte separator) {
-    assert len <= bytes.length;
-    int splits = 0;
+    Preconditions.checkPositionIndex(len, bytes.length);
     if (len == 0) {
       return new byte[][]{null};
     }
-    // Count the splits. Omit multiple separators and the last one
-    for (int i = 0; i < len; i++) {
-      if (bytes[i] == separator) {
+    // Count the splits. Omit multiple separators and the last one by
+    // peeking at prior byte.
+    int splits = 0;
+    for (int i = 1; i < len; i++) {
+      if (bytes[i-1] == separator && bytes[i] != separator) {
         splits++;
       }
     }
-    int last = len - 1;
-    while (last > -1 && bytes[last--] == separator) {
-      splits--;
-    }
     if (splits == 0 && bytes[0] == separator) {
       return new byte[][]{null};
     }
     splits++;
     byte[][] result = new byte[splits][];
-    int startIndex = 0;
     int nextIndex = 0;
-    int index = 0;
-    // Build the splits
-    while (index < splits) {
+    // Build the splits.
+    for (int i = 0; i < splits; i++) {
+      int startIndex = nextIndex;
+      // find next separator in the bytes.
       while (nextIndex < len && bytes[nextIndex] != separator) {
         nextIndex++;
       }
-      result[index] = new byte[nextIndex - startIndex];
-      System.arraycopy(bytes, startIndex, result[index], 0, nextIndex
-          - startIndex);
-      index++;
-      startIndex = nextIndex + 1;
-      nextIndex = startIndex;
+      result[i] = (nextIndex > 0)
+          ? Arrays.copyOfRange(bytes, startIndex, nextIndex)
+          : DFSUtilClient.EMPTY_BYTES; // reuse empty bytes for root.
+      do { // skip over separators.
+        nextIndex++;
+      } while (nextIndex < len && bytes[nextIndex] == separator);
    }
     return result;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ad2a73d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/
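
The first hunk fixes a subrange bug in the joining method shown there
(byteArray2PathString in DFSUtil): the old separator test compared the loop
index against pathComponents.length rather than the end of the requested
range, so joining a strict subrange picked up a spurious trailing '/'. Traced
on a small case:

    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hdfs.DFSUtil;

    byte[][] components = { new byte[0],
        "a".getBytes(StandardCharsets.UTF_8),
        "b".getBytes(StandardCharsets.UTF_8) };
    // join offset=1, length=1 -> want "a"
    // old: i=1, test i < components.length - 1 (1 < 2) -> appends '/' -> "a/"
    // new: lastIndex = offset + length - 1 = 1; 1 < 1 is false -> "a"
    String joined = DFSUtil.byteArray2PathString(components, 1, 1);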

hadoop git commit: HDFS-10655. Fix path related byte array conversion bugs. (daryn)

2016-08-01 Thread daryn
Repository: hadoop
Updated Branches:
  refs/heads/trunk 95694b70c -> 9f473cf90


HDFS-10655. Fix path related byte array conversion bugs. (daryn)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9f473cf9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9f473cf9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9f473cf9

Branch: refs/heads/trunk
Commit: 9f473cf903e586c556154abd56b3a3d820c6b028
Parents: 95694b7
Author: Daryn Sharp <da...@yahoo-inc.com>
Authored: Mon Aug 1 10:14:28 2016 -0500
Committer: Daryn Sharp <da...@yahoo-inc.com>
Committed: Mon Aug 1 10:14:28 2016 -0500

--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  42 
 .../hdfs/server/namenode/TestFsLimits.java  |   2 +-
 .../server/namenode/TestPathComponents.java | 100 ++-
 3 files changed, 97 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f473cf9/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 0ba80d9..5ab6978 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -275,14 +275,15 @@ public class DFSUtil {
     Preconditions.checkArgument(offset >= 0 && offset < pathComponents.length);
     Preconditions.checkArgument(length >= 0 && offset + length <=
         pathComponents.length);
-    if (pathComponents.length == 1
+    if (offset == 0 && length == 1
         && (pathComponents[0] == null || pathComponents[0].length == 0)) {
       return Path.SEPARATOR;
     }
     StringBuilder result = new StringBuilder();
-    for (int i = offset; i < offset + length; i++) {
+    int lastIndex = offset + length - 1;
+    for (int i = offset; i <= lastIndex; i++) {
       result.append(new String(pathComponents[i], Charsets.UTF_8));
-      if (i < pathComponents.length - 1) {
+      if (i < lastIndex) {
         result.append(Path.SEPARATOR_CHAR);
       }
     }
@@ -348,40 +349,37 @@ public class DFSUtil {
   public static byte[][] bytes2byteArray(byte[] bytes,
                                          int len,
                                          byte separator) {
-    assert len <= bytes.length;
-    int splits = 0;
+    Preconditions.checkPositionIndex(len, bytes.length);
     if (len == 0) {
       return new byte[][]{null};
     }
-    // Count the splits. Omit multiple separators and the last one
-    for (int i = 0; i < len; i++) {
-      if (bytes[i] == separator) {
+    // Count the splits. Omit multiple separators and the last one by
+    // peeking at prior byte.
+    int splits = 0;
+    for (int i = 1; i < len; i++) {
+      if (bytes[i-1] == separator && bytes[i] != separator) {
         splits++;
       }
     }
-    int last = len - 1;
-    while (last > -1 && bytes[last--] == separator) {
-      splits--;
-    }
     if (splits == 0 && bytes[0] == separator) {
       return new byte[][]{null};
     }
     splits++;
     byte[][] result = new byte[splits][];
-    int startIndex = 0;
     int nextIndex = 0;
-    int index = 0;
-    // Build the splits
-    while (index < splits) {
+    // Build the splits.
+    for (int i = 0; i < splits; i++) {
+      int startIndex = nextIndex;
+      // find next separator in the bytes.
       while (nextIndex < len && bytes[nextIndex] != separator) {
         nextIndex++;
       }
-      result[index] = new byte[nextIndex - startIndex];
-      System.arraycopy(bytes, startIndex, result[index], 0, nextIndex
-          - startIndex);
-      index++;
-      startIndex = nextIndex + 1;
-      nextIndex = startIndex;
+      result[i] = (nextIndex > 0)
+          ? Arrays.copyOfRange(bytes, startIndex, nextIndex)
+          : DFSUtilClient.EMPTY_BYTES; // reuse empty bytes for root.
+      do { // skip over separators.
+        nextIndex++;
+      } while (nextIndex < len && bytes[nextIndex] == separator);
     }
     return result;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f473cf9/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/h
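
Also notable in the rewrite: replacing `assert len <= bytes.length` with
Guava's Preconditions means the bound is enforced even in production JVMs,
where -ea is rarely set. A minimal contrast:

    import com.google.common.base.Preconditions;

    byte[] bytes = new byte[4];
    int len = 9;
    // assert len <= bytes.length;  // silently skipped without -ea
    Preconditions.checkPositionIndex(len, bytes.length);
    // always throws IndexOutOfBoundsException when len > bytes.length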

svn commit: r1587608 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java src/test/java/org/apache/hadoop/secur

2014-04-15 Thread daryn
Author: daryn
Date: Tue Apr 15 15:25:17 2014
New Revision: 1587608

URL: http://svn.apache.org/r1587608
Log:
HADOOP-10498. Add support for proxy server. (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1587608&r1=1587607&r2=1587608&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Apr 
15 15:25:17 2014
@@ -333,6 +333,8 @@ Release 2.5.0 - UNRELEASED
 
   NEW FEATURES
 
+HADOOP-10498. Add support for proxy server. (daryn)
+
   IMPROVEMENTS
 
 HADOOP-10451. Remove unused field and imports from SaslRpcServer.

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1587608&r1=1587607&r2=1587608&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
 Tue Apr 15 15:25:17 2014
@@ -19,10 +19,12 @@
 package org.apache.hadoop.security.authorize;
 
 import java.net.InetAddress;
+import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -41,12 +43,16 @@ public class ProxyUsers {
   public static final String CONF_GROUPS = ".groups";
   public static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
   public static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
+  public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
+  
   private static boolean init = false;
   // list of groups and hosts per proxyuser
   private static Map<String, Collection<String>> proxyGroups = 
     new HashMap<String, Collection<String>>();
   private static Map<String, Collection<String>> proxyHosts = 
     new HashMap<String, Collection<String>>();
+  private static Collection<String> proxyServers =
+    new HashSet<String>();
 
   /**
    * reread the conf and get new values for hadoop.proxyuser.*.groups/hosts
@@ -62,9 +68,10 @@ public class ProxyUsers {
*/
   public static synchronized void 
refreshSuperUserGroupsConfiguration(Configuration conf) {
 
-// remove alle existing stuff
+// remove all existing stuff
 proxyGroups.clear();
 proxyHosts.clear();
+proxyServers.clear();
 
 // get all the new keys for groups
 String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
@@ -85,9 +92,23 @@ public class ProxyUsers {
   StringUtils.getTrimmedStringCollection(entry.getValue()));
 }
 
+    // trusted proxy servers such as http proxies
+    for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
+      InetSocketAddress addr = new InetSocketAddress(host, 0);
+      if (!addr.isUnresolved()) {
+        proxyServers.add(addr.getAddress().getHostAddress());
+      }
+    }
 init = true;
   }
 
+  public static synchronized boolean isProxyServer(String remoteAddr) { 
+    if(!init) {
+      refreshSuperUserGroupsConfiguration(); 
+    }
+    return proxyServers.contains(remoteAddr);
+  }
+
   /**
* Returns configuration key for effective user groups allowed for a 
superuser
* 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1587608&r1=1587607&r2=1587608&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
 Tue Apr 15 15:25:17 2014
@@ -258,6 +258,16 @@ public class TestProxyUsers {
 assertEquals (1
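
The new hadoop.proxyservers key and ProxyUsers.isProxyServer are the whole
API surface of this change. A minimal usage sketch (the gateway hostnames and
the surrounding class are illustrative, not part of the commit; only the
ProxyUsers calls come from the diff above):

import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authorize.ProxyUsers;

public class ProxyServerCheckSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Hosts listed here are resolved to IP addresses at refresh time.
    conf.set("hadoop.proxyservers", "gateway1.example.com,gateway2.example.com");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

    // isProxyServer matches the caller's remote IP against the resolved
    // set, so the check is on addresses, never on hostnames.
    InetSocketAddress gw = new InetSocketAddress("gateway1.example.com", 0);
    if (!gw.isUnresolved()
        && ProxyUsers.isProxyServer(gw.getAddress().getHostAddress())) {
      System.out.println("request came through a trusted proxy server");
    }
  }
}

Note that resolution happens once per refresh: if a gateway's address changes,
refreshSuperUserGroupsConfiguration must run again before isProxyServer sees
the new IP.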

svn commit: r1587609 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java src/test/java/org/apache/

2014-04-15 Thread daryn
Author: daryn
Date: Tue Apr 15 15:27:04 2014
New Revision: 1587609

URL: http://svn.apache.org/r1587609
Log:
svn merge -c 1587608 FIXES: HADOOP-10498. Add support for proxy server. (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1587609&r1=1587608&r2=1587609&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Apr 15 15:27:04 2014
@@ -8,6 +8,8 @@ Release 2.5.0 - UNRELEASED
 
   NEW FEATURES
 
+HADOOP-10498. Add support for proxy server. (daryn)
+
   IMPROVEMENTS
 
 HADOOP-10451. Remove unused field and imports from SaslRpcServer.

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1587609&r1=1587608&r2=1587609&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
 Tue Apr 15 15:27:04 2014
@@ -19,9 +19,11 @@
 package org.apache.hadoop.security.authorize;
 
 import java.net.InetAddress;
+import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -39,12 +41,16 @@ public class ProxyUsers {
  public static final String CONF_GROUPS = ".groups";
  public static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
  public static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
+  public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
+  
  private static boolean init = false;
  // list of groups and hosts per proxyuser
  private static Map<String, Collection<String>> proxyGroups = 
    new HashMap<String, Collection<String>>();
  private static Map<String, Collection<String>> proxyHosts = 
    new HashMap<String, Collection<String>>();
+  private static Collection<String> proxyServers =
+    new HashSet<String>();
 
   /**
* reread the conf and get new values for hadoop.proxyuser.*.groups/hosts
@@ -60,9 +66,10 @@ public class ProxyUsers {
*/
   public static synchronized void 
refreshSuperUserGroupsConfiguration(Configuration conf) {
 
-// remove alle existing stuff
+// remove all existing stuff
 proxyGroups.clear();
 proxyHosts.clear();
+proxyServers.clear();
 
 // get all the new keys for groups
 String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
@@ -80,9 +87,23 @@ public class ProxyUsers {
   StringUtils.getTrimmedStringCollection(entry.getValue()));
 }
 
+    // trusted proxy servers such as http proxies
+    for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
+      InetSocketAddress addr = new InetSocketAddress(host, 0);
+      if (!addr.isUnresolved()) {
+        proxyServers.add(addr.getAddress().getHostAddress());
+      }
+    }
 init = true;
   }
 
+  public static synchronized boolean isProxyServer(String remoteAddr) { 
+    if(!init) {
+      refreshSuperUserGroupsConfiguration(); 
+    }
+    return proxyServers.contains(remoteAddr);
+  }
+
   /**
* Returns configuration key for effective user groups allowed for a 
superuser
* 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1587609&r1=1587608&r2=1587609&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project

svn commit: r1558883 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/util/Shell.java

2014-01-16 Thread daryn
Author: daryn
Date: Thu Jan 16 18:54:52 2014
New Revision: 1558883

URL: http://svn.apache.org/r1558883
Log:
HADOOP-10146. Workaround JDK7 Process fd close bug (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1558883&r1=1558882&r2=1558883&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Jan 
16 18:54:52 2014
@@ -526,6 +526,8 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10236. Fix typo in o.a.h.ipc.Client#checkResponse. (Akira Ajisaka
 via suresh)
 
+HADOOP-10146. Workaround JDK7 Process fd close bug (daryn)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1558883&r1=1558882&r2=1558883&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
 Thu Jan 16 18:54:52 2014
@@ -21,6 +21,7 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.io.InputStream;
 import java.util.Arrays;
 import java.util.Map;
 import java.util.Timer;
@@ -511,7 +512,17 @@ abstract public class Shell {
      }
      // close the input stream
      try {
-        inReader.close();
+        // JDK 7 tries to automatically drain the input streams for us
+        // when the process exits, but since close is not synchronized,
+        // it creates a race if we close the stream first and the same
+        // fd is recycled.  the stream draining thread will attempt to
+        // drain that fd!!  it may block, OOM, or cause bizarre behavior
+        // see: https://bugs.openjdk.java.net/browse/JDK-8024521
+        //      issue is fixed in build 7u60
+        InputStream stdout = process.getInputStream();
+        synchronized (stdout) {
+          inReader.close();
+        }
      } catch (IOException ioe) {
        LOG.warn("Error while closing the input stream", ioe);
      }
@@ -524,7 +535,10 @@ abstract public class Shell {
        LOG.warn("Interrupted while joining errThread");
      }
      try {
-        errReader.close();
+        InputStream stderr = process.getErrorStream();
+        synchronized (stderr) {
+          errReader.close();
+        }
      } catch (IOException ioe) {
        LOG.warn("Error while closing the error stream", ioe);
      }
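
The reason the fix locks the stream rather than the reader: as the added
comment explains, JDK 7's drainer thread empties a dead process's pipes while
holding the stream object's monitor, so taking that same monitor around
close() serializes the two. A standalone sketch of the idiom (the echo
command and class name are illustrative, not from the commit):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public class SafeProcessCloseSketch {
  public static void main(String[] args)
      throws IOException, InterruptedException {
    Process process = new ProcessBuilder("echo", "hello").start();
    InputStream stdout = process.getInputStream();
    BufferedReader reader = new BufferedReader(new InputStreamReader(stdout));
    String line;
    while ((line = reader.readLine()) != null) {
      System.out.println(line);
    }
    process.waitFor();
    // Hold the stream's monitor across close() so a JDK 7 drainer thread
    // cannot race with it and drain a recycled fd (JDK-8024521).
    synchronized (stdout) {
      reader.close();
    }
  }
}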




svn commit: r1558885 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/util/Shell.java

2014-01-16 Thread daryn
Author: daryn
Date: Thu Jan 16 18:56:21 2014
New Revision: 1558885

URL: http://svn.apache.org/r1558885
Log:
svn merge -c 1558883 FIXES: HADOOP-10146. Workaround JDK7 Process fd close bug 
(daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1558885&r1=1558884&r2=1558885&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Jan 16 18:56:21 2014
@@ -227,6 +227,8 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10236. Fix typo in o.a.h.ipc.Client#checkResponse. (Akira Ajisaka
 via suresh)
 
+HADOOP-10146. Workaround JDK7 Process fd close bug (daryn)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1558885&r1=1558884&r2=1558885&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
 Thu Jan 16 18:56:21 2014
@@ -21,6 +21,7 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.io.InputStream;
 import java.util.Arrays;
 import java.util.Map;
 import java.util.Timer;
@@ -511,7 +512,17 @@ abstract public class Shell {
      }
      // close the input stream
      try {
-        inReader.close();
+        // JDK 7 tries to automatically drain the input streams for us
+        // when the process exits, but since close is not synchronized,
+        // it creates a race if we close the stream first and the same
+        // fd is recycled.  the stream draining thread will attempt to
+        // drain that fd!!  it may block, OOM, or cause bizarre behavior
+        // see: https://bugs.openjdk.java.net/browse/JDK-8024521
+        //      issue is fixed in build 7u60
+        InputStream stdout = process.getInputStream();
+        synchronized (stdout) {
+          inReader.close();
+        }
      } catch (IOException ioe) {
        LOG.warn("Error while closing the input stream", ioe);
      }
@@ -524,7 +535,10 @@ abstract public class Shell {
        LOG.warn("Interrupted while joining errThread");
      }
      try {
-        errReader.close();
+        InputStream stderr = process.getErrorStream();
+        synchronized (stderr) {
+          errReader.close();
+        }
      } catch (IOException ioe) {
        LOG.warn("Error while closing the error stream", ioe);
      }




svn commit: r1558886 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/util/Shell.java

2014-01-16 Thread daryn
Author: daryn
Date: Thu Jan 16 18:57:55 2014
New Revision: 1558886

URL: http://svn.apache.org/r1558886
Log:
HADOOP-10146. Workaround JDK7 Process fd close bug (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1558886&r1=1558885&r2=1558886&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Jan 16 18:57:55 2014
@@ -19,6 +19,8 @@ Release 0.23.11 - UNRELEASED
 HADOOP-10081. Client.setupIOStreams can leak socket resources on exception
 or error (Tsuyoshi OZAWA via jlowe)
 
+HADOOP-10146. Workaround JDK7 Process fd close bug (daryn)
+
 Release 0.23.10 - 2013-12-09
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1558886&r1=1558885&r2=1558886&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
 Thu Jan 16 18:57:55 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.util;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.util.Map;
 import java.util.Timer;
@@ -210,7 +211,17 @@ abstract public class Shell {
      }
      // close the input stream
      try {
-        inReader.close();
+        // JDK 7 tries to automatically drain the input streams for us
+        // when the process exits, but since close is not synchronized,
+        // it creates a race if we close the stream first and the same
+        // fd is recycled.  the stream draining thread will attempt to
+        // drain that fd!!  it may block, OOM, or cause bizarre behavior
+        // see: https://bugs.openjdk.java.net/browse/JDK-8024521
+        //      issue is fixed in build 7u60
+        InputStream stdout = process.getInputStream();
+        synchronized (stdout) {
+          inReader.close();
+        }
      } catch (IOException ioe) {
        LOG.warn("Error while closing the input stream", ioe);
      }
@@ -218,7 +229,10 @@ abstract public class Shell {
        errThread.interrupt();
      }
      try {
-        errReader.close();
+        InputStream stderr = process.getErrorStream();
+        synchronized (stderr) {
+          errReader.close();
+        }
      } catch (IOException ioe) {
        LOG.warn("Error while closing the error stream", ioe);
      }




svn commit: r1552389 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/SaslRpcServer.java src/test/java/org/apache/hadoop/ipc/TestSasl

2013-12-19 Thread daryn
Author: daryn
Date: Thu Dec 19 18:21:33 2013
New Revision: 1552389

URL: http://svn.apache.org/r1552389
Log:
HADOOP-10172. Cache SASL server factories (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1552389&r1=1552388&r2=1552389&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Dec 
19 18:21:33 2013
@@ -408,6 +408,8 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10047. Add a direct-buffer based apis for compression. (Gopal V
 via acmurthy)
 
+HADOOP-10172. Cache SASL server factories (daryn)
+
   BUG FIXES
 
 HADOOP-9964. Fix deadlocks in TestHttpServer by synchronize

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1552389&r1=1552388&r2=1552389&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
 Thu Dec 19 18:21:33 2013
@@ -25,6 +25,10 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -38,6 +42,7 @@ import javax.security.sasl.RealmCallback
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
+import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
@@ -63,6 +68,7 @@ public class SaslRpcServer {
  public static final String SASL_DEFAULT_REALM = "default";
  public static final Map<String, String> SASL_PROPS = 
      new TreeMap<String, String>();
+  private static SaslServerFactory saslFactory;
 
   public static enum QualityOfProtection {
    AUTHENTICATION("auth"),
@@ -151,7 +157,7 @@ public class SaslRpcServer {
        new PrivilegedExceptionAction<SaslServer>() {
   @Override
   public SaslServer run() throws SaslException  {
-return Sasl.createSaslServer(mechanism, protocol, serverId,
+return saslFactory.createSaslServer(mechanism, protocol, serverId,
 SaslRpcServer.SASL_PROPS, callback);
   }
 });
@@ -180,6 +186,7 @@ public class SaslRpcServer {
 SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
    SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
 Security.addProvider(new SaslPlainServer.SecurityProvider());
+saslFactory = new FastSaslServerFactory(SASL_PROPS);
   }
   
   static String encodeIdentifier(byte[] identifier) {
@@ -363,4 +370,47 @@ public class SaslRpcServer {
   }
 }
   }
+  
+  // Sasl.createSaslServer is 100-200X slower than caching the factories!
+  private static class FastSaslServerFactory implements SaslServerFactory {
+    private final Map<String,List<SaslServerFactory>> factoryCache =
+        new HashMap<String,List<SaslServerFactory>>();
+
+    FastSaslServerFactory(Map<String,?> props) {
+      final Enumeration<SaslServerFactory> factories =
+          Sasl.getSaslServerFactories();
+      while (factories.hasMoreElements()) {
+        SaslServerFactory factory = factories.nextElement();
+        for (String mech : factory.getMechanismNames(props)) {
+          if (!factoryCache.containsKey(mech)) {
+            factoryCache.put(mech, new ArrayList<SaslServerFactory>());
+          }
+          factoryCache.get(mech).add(factory);
+        }
+      }
+    }
+
+    @Override
+    public SaslServer createSaslServer(String mechanism, String protocol,
+        String serverName, Map<String,?> props, CallbackHandler cbh)
+        throws SaslException {
+      SaslServer saslServer = null;
+      List<SaslServerFactory> factories = factoryCache.get(mechanism);
+      if (factories != null) {
+        for (SaslServerFactory factory : factories) {
+          saslServer = factory.createSaslServer(
+              mechanism, protocol, serverName, props, cbh
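
A reduced sketch of the caching idea outside of SaslRpcServer, for reference
(the class and method names here are illustrative; only the
javax.security.sasl calls are real). The one-time enumeration replaces the
provider walk that Sasl.createSaslServer otherwise performs on every call:

import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslServerFactory;

public class SaslFactoryCacheSketch {
  private final Map<String, List<SaslServerFactory>> byMechanism =
      new HashMap<String, List<SaslServerFactory>>();

  public SaslFactoryCacheSketch(Map<String, ?> props) {
    // Enumerate the registered factories once and index them by the
    // mechanism names they advertise for these properties.
    Enumeration<SaslServerFactory> factories = Sasl.getSaslServerFactories();
    while (factories.hasMoreElements()) {
      SaslServerFactory factory = factories.nextElement();
      for (String mech : factory.getMechanismNames(props)) {
        List<SaslServerFactory> list = byMechanism.get(mech);
        if (list == null) {
          list = new ArrayList<SaslServerFactory>();
          byMechanism.put(mech, list);
        }
        list.add(factory);
      }
    }
  }

  public List<SaslServerFactory> factoriesFor(String mechanism) {
    List<SaslServerFactory> list = byMechanism.get(mechanism);
    return list != null ? list : new ArrayList<SaslServerFactory>();
  }
}

One caveat of any such cache: factories registered after construction (for
example via Security.addProvider) stay invisible until the cache is rebuilt,
which is why the commit builds the cache only after adding the PLAIN provider.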

svn commit: r1552391 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/SaslRpcServer.java src/test/java/org/apache/hadoop/

2013-12-19 Thread daryn
Author: daryn
Date: Thu Dec 19 18:23:26 2013
New Revision: 1552391

URL: http://svn.apache.org/r1552391
Log:
svn merge -c 1552389 FIXES: HADOOP-10172. Cache SASL server factories (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1552391&r1=1552390&r2=1552391&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Dec 19 18:23:26 2013
@@ -116,6 +116,8 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10047. Add a direct-buffer based apis for compression. (Gopal V
 via acmurthy)
 
+HADOOP-10172. Cache SASL server factories (daryn)
+
   BUG FIXES
 
 HADOOP-9964. Fix deadlocks in TestHttpServer by synchronize

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1552391&r1=1552390&r2=1552391&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
 Thu Dec 19 18:23:26 2013
@@ -25,6 +25,10 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -38,6 +42,7 @@ import javax.security.sasl.RealmCallback
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
+import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
@@ -63,6 +68,7 @@ public class SaslRpcServer {
  public static final String SASL_DEFAULT_REALM = "default";
  public static final Map<String, String> SASL_PROPS = 
      new TreeMap<String, String>();
+  private static SaslServerFactory saslFactory;
 
   public static enum QualityOfProtection {
    AUTHENTICATION("auth"),
@@ -151,7 +157,7 @@ public class SaslRpcServer {
        new PrivilegedExceptionAction<SaslServer>() {
   @Override
   public SaslServer run() throws SaslException  {
-return Sasl.createSaslServer(mechanism, protocol, serverId,
+return saslFactory.createSaslServer(mechanism, protocol, serverId,
 SaslRpcServer.SASL_PROPS, callback);
   }
 });
@@ -180,6 +186,7 @@ public class SaslRpcServer {
 SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
    SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
 Security.addProvider(new SaslPlainServer.SecurityProvider());
+saslFactory = new FastSaslServerFactory(SASL_PROPS);
   }
   
   static String encodeIdentifier(byte[] identifier) {
@@ -363,4 +370,47 @@ public class SaslRpcServer {
   }
 }
   }
+  
+  // Sasl.createSaslServer is 100-200X slower than caching the factories!
+  private static class FastSaslServerFactory implements SaslServerFactory {
+    private final Map<String,List<SaslServerFactory>> factoryCache =
+        new HashMap<String,List<SaslServerFactory>>();
+
+    FastSaslServerFactory(Map<String,?> props) {
+      final Enumeration<SaslServerFactory> factories =
+          Sasl.getSaslServerFactories();
+      while (factories.hasMoreElements()) {
+        SaslServerFactory factory = factories.nextElement();
+        for (String mech : factory.getMechanismNames(props)) {
+          if (!factoryCache.containsKey(mech)) {
+            factoryCache.put(mech, new ArrayList<SaslServerFactory>());
+          }
+          factoryCache.get(mech).add(factory);
+        }
+      }
+    }
+
+    @Override
+    public SaslServer createSaslServer(String mechanism, String protocol,
+        String serverName, Map<String,?> props, CallbackHandler cbh)
+        throws SaslException {
+      SaslServer saslServer = null;
+      List<SaslServerFactory> factories = factoryCache.get(mechanism);
+      if (factories != null

svn commit: r1548175 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 15:47:55 2013
New Revision: 1548175

URL: http://svn.apache.org/r1548175
Log:
HADOOP-10129. Distcp may succeed when it fails (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1548175&r1=1548174&r2=1548175&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Dec 
 5 15:47:55 2013
@@ -460,6 +460,8 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10135 writes to swift fs over partition size leave temp files and
 empty output file (David Dobbins via stevel)
 
+HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -2355,6 +2357,8 @@ Release 0.23.10 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES




svn commit: r1548175 - in /hadoop/common/trunk/hadoop-tools/hadoop-distcp: ./ src/main/java/org/apache/hadoop/tools/ src/main/java/org/apache/hadoop/tools/mapred/ src/test/java/org/apache/hadoop/tools

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 15:47:55 2013
New Revision: 1548175

URL: http://svn.apache.org/r1548175
Log:
HADOOP-10129. Distcp may succeed when it fails (daryn)

Added:

hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestRetriableFileCopyCommand.java
Modified:
hadoop/common/trunk/hadoop-tools/hadoop-distcp/pom.xml

hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java

hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java

hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java

Modified: hadoop/common/trunk/hadoop-tools/hadoop-distcp/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-distcp/pom.xml?rev=1548175&r1=1548174&r2=1548175&view=diff
==
--- hadoop/common/trunk/hadoop-tools/hadoop-distcp/pom.xml (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-distcp/pom.xml Thu Dec  5 15:47:55 
2013
@@ -95,6 +95,11 @@
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
  </dependencies>
 
  <build>

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1548175&r1=1548174&r2=1548175&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 Thu Dec  5 15:47:55 2013
@@ -32,6 +32,8 @@ import org.apache.hadoop.tools.util.Dist
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.Credentials;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.*;
 import java.util.Stack;
 
@@ -107,12 +109,13 @@ public class SimpleCopyListing extends C
   /** {@inheritDoc} */
   @Override
   public void doBuildListing(Path pathToListingFile, DistCpOptions options) 
throws IOException {
-
-SequenceFile.Writer fileListWriter = null;
-
+doBuildListing(getWriter(pathToListingFile), options);
+  }
+  
+  @VisibleForTesting
+  public void doBuildListing(SequenceFile.Writer fileListWriter,
+  DistCpOptions options) throws IOException {
 try {
-  fileListWriter = getWriter(pathToListingFile);
-
   for (Path path: options.getSourcePaths()) {
 FileSystem sourceFS = path.getFileSystem(getConf());
 path = makeQualified(path);
@@ -143,8 +146,10 @@ public class SimpleCopyListing extends C
   localFile, options);
 }
   }
+  fileListWriter.close();
+  fileListWriter = null;
 } finally {
-  IOUtils.closeStream(fileListWriter);
+  IOUtils.cleanup(LOG, fileListWriter);
 }
   }
 

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java?rev=1548175&r1=1548174&r2=1548175&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
 Thu Dec  5 15:47:55 2013
@@ -30,6 +30,8 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.*;
 import java.util.EnumSet;
 
@@ -176,7 +178,8 @@ public class RetriableFileCopyCommand ex
    return new Path(root, ".distcp.tmp." + 
        context.getTaskAttemptID().toString());
   }
 
-  private long copyBytes(FileStatus sourceFileStatus, OutputStream outStream,
+  @VisibleForTesting
+  long copyBytes(FileStatus sourceFileStatus, OutputStream outStream,
  int bufferSize, Mapper.Context context)
   throws IOException {
 Path source = sourceFileStatus.getPath();
@@ -193,6 +196,8 @@ public class RetriableFileCopyCommand ex
 updateContextStatus(totalBytesRead, context, sourceFileStatus);
 bytesRead = inStream.read(buf);
   }
+  outStream.close();
+  outStream = null;
 } finally {
   IOUtils.cleanup(LOG, outStream
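
The pattern in both files is the same: close the stream on the success path
and null the reference, so that an exception from close() (often the flush of
the last buffered bytes) fails the task instead of vanishing in finally;
IOUtils.cleanup then only does best-effort cleanup when something has already
failed. A generic sketch of the idiom without the Hadoop helpers (the file
path and payload are illustrative):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class CloseOnSuccessSketch {
  public static void write(byte[] data, String path) throws IOException {
    OutputStream out = new FileOutputStream(path);
    try {
      out.write(data);
      // Close on the success path so errors thrown by close() propagate...
      out.close();
      out = null;
    } finally {
      // ...and only swallow close() errors when an exception is already
      // unwinding, mirroring what IOUtils.cleanup(LOG, stream) does.
      if (out != null) {
        try {
          out.close();
        } catch (IOException suppressed) {
          // intentionally ignored: the original failure is propagating
        }
      }
    }
  }
}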

svn commit: r1548176 - in /hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp: ./ src/main/java/org/apache/hadoop/tools/ src/main/java/org/apache/hadoop/tools/mapred/ src/test/java/org/apache/

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 15:51:28 2013
New Revision: 1548176

URL: http://svn.apache.org/r1548176
Log:
svn merge -c 1548175 FIXES: HADOOP-10129. Distcp may succeed when it fails 
(daryn)

Added:

hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestRetriableFileCopyCommand.java
  - copied unchanged from r1548175, 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestRetriableFileCopyCommand.java
Modified:
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/pom.xml

hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/pom.xml?rev=1548176&r1=1548175&r2=1548176&view=diff
==
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/pom.xml 
(original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/pom.xml Thu Dec  
5 15:51:28 2013
@@ -95,6 +95,11 @@
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
  </dependencies>
 
  <build>

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1548176&r1=1548175&r2=1548176&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 Thu Dec  5 15:51:28 2013
@@ -32,6 +32,8 @@ import org.apache.hadoop.tools.util.Dist
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.Credentials;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.*;
 import java.util.Stack;
 
@@ -107,12 +109,13 @@ public class SimpleCopyListing extends C
   /** {@inheritDoc} */
   @Override
   public void doBuildListing(Path pathToListingFile, DistCpOptions options) 
throws IOException {
-
-SequenceFile.Writer fileListWriter = null;
-
+doBuildListing(getWriter(pathToListingFile), options);
+  }
+  
+  @VisibleForTesting
+  public void doBuildListing(SequenceFile.Writer fileListWriter,
+  DistCpOptions options) throws IOException {
 try {
-  fileListWriter = getWriter(pathToListingFile);
-
   for (Path path: options.getSourcePaths()) {
 FileSystem sourceFS = path.getFileSystem(getConf());
 path = makeQualified(path);
@@ -140,8 +143,10 @@ public class SimpleCopyListing extends C
   writeToFileListing(fileListWriter, rootStatus, sourcePathRoot, 
localFile);
 }
   }
+  fileListWriter.close();
+  fileListWriter = null;
 } finally {
-  IOUtils.closeStream(fileListWriter);
+  IOUtils.cleanup(LOG, fileListWriter);
 }
   }
 

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java?rev=1548176&r1=1548175&r2=1548176&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
 Thu Dec  5 15:51:28 2013
@@ -30,6 +30,8 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.*;
 import java.util.EnumSet;
 
@@ -176,7 +178,8 @@ public class RetriableFileCopyCommand ex
    return new Path(root, ".distcp.tmp." + 
        context.getTaskAttemptID().toString());
   }
 
-  private long copyBytes(FileStatus sourceFileStatus, OutputStream outStream,
+  @VisibleForTesting
+  long copyBytes(FileStatus sourceFileStatus

svn commit: r1548176 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 15:51:28 2013
New Revision: 1548176

URL: http://svn.apache.org/r1548176
Log:
svn merge -c 1548175 FIXES: HADOOP-10129. Distcp may succeed when it fails 
(daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1548176&r1=1548175&r2=1548176&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Dec  5 15:51:28 2013
@@ -169,6 +169,8 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10135 writes to swift fs over partition size leave temp files and
 empty output file (David Dobbins via stevel)
 
+HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -2090,6 +2092,8 @@ Release 0.23.10 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES




svn commit: r1548178 - in /hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp: ./ src/main/java/org/apache/hadoop/tools/ src/main/java/org/apache/hadoop/tools/mapred/ src/test/java/org/apac

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 15:54:27 2013
New Revision: 1548178

URL: http://svn.apache.org/r1548178
Log:
HADOOP-10129. Distcp may succeed when it fails (daryn)

Added:

hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestRetriableFileCopyCommand.java
Modified:
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/pom.xml

hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java

hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java

hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java

Modified: hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/pom.xml?rev=1548178&r1=1548177&r2=1548178&view=diff
==
--- hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/pom.xml 
(original)
+++ hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/pom.xml Thu 
Dec  5 15:54:27 2013
@@ -90,6 +90,11 @@
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
  </dependencies>
 
  <build>

Modified: 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1548178&r1=1548177&r2=1548178&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 Thu Dec  5 15:54:27 2013
@@ -32,6 +32,8 @@ import org.apache.hadoop.tools.util.Dist
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.Credentials;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.*;
 import java.util.Stack;
 
@@ -107,12 +109,13 @@ public class SimpleCopyListing extends C
   /** {@inheritDoc} */
   @Override
   public void doBuildListing(Path pathToListingFile, DistCpOptions options) 
throws IOException {
-
-SequenceFile.Writer fileListWriter = null;
-
+doBuildListing(getWriter(pathToListingFile), options);
+  }
+  
+  @VisibleForTesting
+  public void doBuildListing(SequenceFile.Writer fileListWriter,
+  DistCpOptions options) throws IOException {
 try {
-  fileListWriter = getWriter(pathToListingFile);
-
   for (Path path: options.getSourcePaths()) {
 FileSystem sourceFS = path.getFileSystem(getConf());
 path = makeQualified(path);
@@ -140,8 +143,10 @@ public class SimpleCopyListing extends C
   writeToFileListing(fileListWriter, rootStatus, sourcePathRoot, 
localFile);
 }
   }
+  fileListWriter.close();
+  fileListWriter = null;
 } finally {
-  IOUtils.closeStream(fileListWriter);
+  IOUtils.cleanup(LOG, fileListWriter);
 }
   }
 

Modified: 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java?rev=1548178&r1=1548177&r2=1548178&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
 Thu Dec  5 15:54:27 2013
@@ -30,6 +30,8 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.*;
 import java.util.EnumSet;
 
@@ -176,7 +178,8 @@ public class RetriableFileCopyCommand ex
    return new Path(root, ".distcp.tmp." + 
        context.getTaskAttemptID().toString());
   }
 
-  private long copyBytes(FileStatus sourceFileStatus, OutputStream outStream,
+  @VisibleForTesting
+  long copyBytes(FileStatus sourceFileStatus, OutputStream outStream,
  int bufferSize, Mapper.Context context)
   throws IOException {
 Path source

svn commit: r1548178 - /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 15:54:27 2013
New Revision: 1548178

URL: http://svn.apache.org/r1548178
Log:
HADOOP-10129. Distcp may succeed when it fails (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1548178&r1=1548177&r2=1548178&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Dec  5 15:54:27 2013
@@ -43,6 +43,8 @@ Release 0.23.10 - 2013-12-09
 HADOOP-10091. Job with a har archive as input fails on 0.23 (Jason Dere
 and Jason Lowe via jlowe)
 
+HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES




svn commit: r1548194 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 16:26:43 2013
New Revision: 1548194

URL: http://svn.apache.org/r1548194
Log:
Move HADOOP-10129 from 0.23.10 to 0.23.11 in CHANGES.txt

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1548194&r1=1548193&r2=1548194&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Dec 
 5 16:26:43 2013
@@ -2339,6 +2339,20 @@ Release 2.0.0-alpha - 05-23-2012
 HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
 by Jersey (Alejandro Abdelnur via atm)
 
+Release 0.23.11 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 0.23.10 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -2360,8 +2374,6 @@ Release 0.23.10 - UNRELEASED
 
   BUG FIXES
 
-HADOOP-10129. Distcp may succeed when it fails (daryn)
-
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES




svn commit: r1548196 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2013-12-05 Thread daryn
Author: daryn
Date: Thu Dec  5 16:28:57 2013
New Revision: 1548196

URL: http://svn.apache.org/r1548196
Log:
Move HADOOP-10129 from 0.23.10 to 0.23.11 in CHANGES.txt

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1548196&r1=1548195&r2=1548196&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Dec  5 16:28:57 2013
@@ -2074,6 +2074,20 @@ Release 2.0.0-alpha - 05-23-2012
 HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
 by Jersey (Alejandro Abdelnur via atm)
 
+Release 0.23.11 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 0.23.10 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -2095,8 +2109,6 @@ Release 0.23.10 - UNRELEASED
 
   BUG FIXES
 
-HADOOP-10129. Distcp may succeed when it fails (daryn)
-
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES




svn commit: r1542111 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-11-14 Thread daryn
Author: daryn
Date: Thu Nov 14 22:55:18 2013
New Revision: 1542111

URL: http://svn.apache.org/r1542111
Log:
HADOOP-9955. RPC idle connection closing is extremely inefficient (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1542111&r1=1542110&r2=1542111&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Nov 
14 22:55:18 2013
@@ -2313,6 +2313,8 @@ Release 0.23.10 - UNRELEASED
 
 HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
 
+HADOOP-9955. RPC idle connection closing is extremely inefficient (daryn)
+
   BUG FIXES
 
 Release 0.23.9 - 2013-07-08

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1542111&r1=1542110&r2=1542111&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 Thu Nov 14 22:55:18 2013
@@ -226,4 +226,10 @@ public class CommonConfigurationKeys ext
  public static final String  IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY = "ipc.client.fallback-to-simple-auth-allowed";
  public static final boolean IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT = false;
 
+  /** How often the server scans for idle connections */
+  public static final String IPC_CLIENT_CONNECTION_IDLESCANINTERVAL_KEY =
+      "ipc.client.connection.idle-scan-interval.ms";
+  /** Default value for IPC_SERVER_CONNECTION_IDLE_SCAN_INTERVAL_KEY */
+  public static final int IPC_CLIENT_CONNECTION_IDLESCANINTERVAL_DEFAULT =
+      10000;
 }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1542111&r1=1542110&r2=1542111&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Thu Nov 14 22:55:18 2013
@@ -51,11 +51,13 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.Set;
+import java.util.Timer;
+import java.util.TimerTask;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
@@ -347,16 +349,6 @@ public abstract class Server {
  private int readThreads;                  // number of read threads
  private int readerPendingConnectionQueue; // number of connections to queue per read thread
  private Class<? extends Writable> rpcRequestClass;  // class used for deserializing the rpc request
-  private int maxIdleTime;                  // the maximum idle time after 
-                                            // which a client may be disconnected
-  private int thresholdIdleConnections;     // the number of idle connections
-                                            // after which we will start
-                                            // cleaning up idle 
-                                            // connections
-  int maxConnectionsToNuke;                 // the max number of 
-                                            // connections to nuke
-                                            // during a cleanup
-  
  protected RpcMetrics rpcMetrics;
  protected RpcDetailedMetrics rpcDetailedMetrics;
   
@@ -374,13 +366,10 @@ public abstract class Server
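
The shape of the replacement is visible in the imports above: a single
java.util.Timer periodically sweeps all connections, in place of per-event
idle accounting (maxIdleTime, thresholdIdleConnections, maxConnectionsToNuke)
on the listener's hot path. A toy sketch of that shape, not the actual Server
internals (the Conn interface and the registry here are placeholders):

import java.util.Collections;
import java.util.Iterator;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;

public class IdleScanSketch {
  interface Conn {
    boolean isIdle(long now, long maxIdleMillis);
    void close();
  }

  private final Set<Conn> connections =
      Collections.newSetFromMap(new ConcurrentHashMap<Conn, Boolean>());
  private final Timer idleScanTimer =
      new Timer("idle connection scanner", true);  // daemon thread

  public void start(long scanIntervalMillis, final long maxIdleMillis) {
    // One periodic task closes idle connections per sweep, so the cost is
    // paid on a timer thread at a configurable interval (compare
    // ipc.client.connection.idle-scan-interval.ms above), not per request.
    idleScanTimer.schedule(new TimerTask() {
      @Override
      public void run() {
        long now = System.currentTimeMillis();
        for (Iterator<Conn> it = connections.iterator(); it.hasNext();) {
          Conn c = it.next();
          if (c.isIdle(now, maxIdleMillis)) {
            it.remove();
            c.close();
          }
        }
      }
    }, scanIntervalMillis, scanIntervalMillis);
  }

  public void register(Conn c) { connections.add(c); }
  public void stop() { idleScanTimer.cancel(); }
}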

svn commit: r1542112 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/had

2013-11-14 Thread daryn
Author: daryn
Date: Thu Nov 14 22:57:17 2013
New Revision: 1542112

URL: http://svn.apache.org/r1542112
Log:
svn merge -c 1542111 FIXES: HADOOP-9955. RPC idle connection closing is 
extremely inefficient (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1542112&r1=1542111&r2=1542112&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Nov 14 22:57:17 2013
@@ -2048,6 +2048,8 @@ Release 0.23.10 - UNRELEASED
 
 HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
 
+HADOOP-9955. RPC idle connection closing is extremely inefficient (daryn)
+
   BUG FIXES
 
 Release 0.23.9 - 2013-07-08

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1542112&r1=1542111&r2=1542112&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 Thu Nov 14 22:57:17 2013
@@ -226,4 +226,10 @@ public class CommonConfigurationKeys ext
  public static final String  IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY = "ipc.client.fallback-to-simple-auth-allowed";
  public static final boolean IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT = false;
 
+  /** How often the server scans for idle connections */
+  public static final String IPC_CLIENT_CONNECTION_IDLESCANINTERVAL_KEY =
+      "ipc.client.connection.idle-scan-interval.ms";
+  /** Default value for IPC_SERVER_CONNECTION_IDLE_SCAN_INTERVAL_KEY */
+  public static final int IPC_CLIENT_CONNECTION_IDLESCANINTERVAL_DEFAULT =
+      10000;
 }

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1542112&r1=1542111&r2=1542112&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Thu Nov 14 22:57:17 2013
@@ -51,11 +51,13 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.Set;
+import java.util.Timer;
+import java.util.TimerTask;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
@@ -344,16 +346,6 @@ public abstract class Server {
  private int readThreads;                  // number of read threads
  private int readerPendingConnectionQueue; // number of connections to queue per read thread
  private Class<? extends Writable> rpcRequestClass;  // class used for deserializing the rpc request
-  private int maxIdleTime;                  // the maximum idle time after 
-                                            // which a client may be disconnected
-  private int thresholdIdleConnections;     // the number of idle connections
-                                            // after which we will start
-                                            // cleaning up idle 
-                                            // connections
-  int maxConnectionsToNuke;                 // the max number of 
-                                            // connections

svn commit: r1542113 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/

2013-11-14 Thread daryn
Author: daryn
Date: Thu Nov 14 23:01:38 2013
New Revision: 1542113

URL: http://svn.apache.org/r1542113
Log:
HADOOP-9955. RPC idle connection closing is extremely inefficient (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1542113&r1=1542112&r2=1542113&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Nov 14 23:01:38 2013
@@ -22,6 +22,8 @@ Release 0.23.10 - UNRELEASED
 
 HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
 
+HADOOP-9955. RPC idle connection closing is extremely inefficient (daryn)
+
   BUG FIXES
 
 HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via 
daryn)

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1542113&r1=1542112&r2=1542113&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 Thu Nov 14 23:01:38 2013
@@ -129,5 +129,11 @@ public class CommonConfigurationKeys ext
   public static final String  IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY =
      "ipc.client.fallback-to-simple-auth-allowed";
   public static final boolean
      IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT = false;
 
+  /** How often the server scans for idle connections */
+  public static final String IPC_CLIENT_CONNECTION_IDLESCANINTERVAL_KEY =
+      "ipc.client.connection.idle-scan-interval.ms";
+  /** Default value for IPC_SERVER_CONNECTION_IDLE_SCAN_INTERVAL_KEY */
+  public static final int IPC_CLIENT_CONNECTION_IDLESCANINTERVAL_DEFAULT =
+  1;
 }
 

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1542113&r1=1542112&r2=1542113&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Thu Nov 14 23:01:38 2013
@@ -48,11 +48,13 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.Set;
+import java.util.Timer;
+import java.util.TimerTask;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
@@ -227,15 +229,6 @@ public abstract class Server {
   private int readThreads;                  // number of read threads
   private int readerPendingConnectionQueue; // number of connections to queue per read thread
   private Class<? extends Writable> paramClass;   // class of call parameters
-  private int maxIdleTime;                  // the maximum idle time after
-                                            // which a client may be disconnected
-  private int thresholdIdleConnections;     // the number of idle connections
-                                            // after which we will start
-                                            // cleaning up idle connections
-  int maxConnectionsToNuke;                 // the max number

svn commit: r1541736 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-11-13 Thread daryn
Author: daryn
Date: Wed Nov 13 21:37:21 2013
New Revision: 1541736

URL: http://svn.apache.org/r1541736
Log:
HADOOP-9956. RPC listener inefficiently assigns connections to readers (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1541736&r1=1541735&r2=1541736&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Nov 
13 21:37:21 2013
@@ -284,7 +284,9 @@ Trunk (Unreleased)
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
-HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay 
Radia)
+HADOOP-8589. ViewFs tests fail when tests and home dirs are nested (sanjay 
Radia)
+
+HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
 
 Release 2.3.0 - UNRELEASED
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1541736&r1=1541735&r2=1541736&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 Wed Nov 13 21:37:21 2013
@@ -65,6 +65,13 @@ public class CommonConfigurationKeys ext
   /** Default value for IPC_SERVER_RPC_READ_THREADS_KEY */
   public static final int IPC_SERVER_RPC_READ_THREADS_DEFAULT = 1;
   
+  /** Number of pending connections that may be queued per socket reader */
+  public static final String IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE_KEY =
+      "ipc.server.read.connection-queue.size";
+  /** Default value for IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE */
+  public static final int IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE_DEFAULT =
+  100;
+  
   public static final String IPC_MAXIMUM_DATA_LENGTH =
      "ipc.maximum.data.length";
   

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1541736&r1=1541735&r2=1541736&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Wed Nov 13 21:37:21 2013
@@ -345,6 +345,7 @@ public abstract class Server {
   private int port;   // port we listen on
   private int handlerCount;   // number of handler threads
   private int readThreads;// number of read threads
+  private int readerPendingConnectionQueue; // number of connections to queue per read thread
   private Class<? extends Writable> rpcRequestClass;  // class used for deserializing the rpc request
   private int maxIdleTime;                  // the maximum idle time after
                                             // which a client may be disconnected
@@ -553,12 +554,14 @@ public abstract class Server {
 }
 
 private class Reader extends Thread {
-  private volatile boolean adding = false;
+  final private BlockingQueue<Connection> pendingConnections;
   private final Selector readSelector;
 
   Reader(String name) throws IOException {
 super(name);
 
+this.pendingConnections =
+new LinkedBlockingQueue<Connection>(readerPendingConnectionQueue);
 this.readSelector = Selector.open();
   }
   
@@ -580,10 +583,14 @@ public abstract class Server {
 while (running) {
   SelectionKey key = null;
   try {
+// consume as many connections as currently queued to avoid
+// unbridled acceptance of connections that starves the select
+int size = pendingConnections.size

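What the truncated hunk above goes on to do: each Reader drains its bounded
queue of newly accepted connections before every select(), so the listener
hands a connection off in O(1) and a burst of accepts cannot starve reads
already in progress. A self-contained Java sketch of that loop (ReaderSketch
and its use of raw SocketChannels are illustrative; the real code queues
Hadoop's Connection objects):

    import java.io.IOException;
    import java.nio.channels.SelectionKey;
    import java.nio.channels.Selector;
    import java.nio.channels.SocketChannel;
    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    class ReaderSketch extends Thread {
      private final BlockingQueue<SocketChannel> pendingConnections;
      private final Selector readSelector;
      private volatile boolean running = true;

      ReaderSketch(int queueSize) throws IOException {
        this.pendingConnections = new LinkedBlockingQueue<>(queueSize);
        this.readSelector = Selector.open();
      }

      // Called by the listener thread; blocks when the queue is full,
      // which throttles accepts instead of overwhelming this reader.
      void addConnection(SocketChannel channel) throws InterruptedException {
        pendingConnections.put(channel);
        readSelector.wakeup();
      }

      @Override
      public void run() {
        while (running) {
          try {
            // Register only as many connections as are queued right now,
            // so a flood of accepts cannot starve the select() below.
            int size = pendingConnections.size();
            for (int i = size; i > 0; i--) {
              SocketChannel channel = pendingConnections.take();
              channel.configureBlocking(false);
              channel.register(readSelector, SelectionKey.OP_READ);
            }
            readSelector.select();
            // ... process ready keys, then loop ...
          } catch (Exception e) {
            // a real server would log and keep running
          }
        }
      }
    }
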
svn commit: r1541740 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-11-13 Thread daryn
Author: daryn
Date: Wed Nov 13 21:41:51 2013
New Revision: 1541740

URL: http://svn.apache.org/r1541740
Log:
Move HADOOP-9956 to correct release in CHANGES.txt

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1541740&r1=1541739&r2=1541740&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Nov 
13 21:41:51 2013
@@ -286,8 +286,6 @@ Trunk (Unreleased)
 
 HADOOP-8589. ViewFs tests fail when tests and home dirs are nested (sanjay 
Radia)
 
-HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
-
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -2305,6 +2303,8 @@ Release 0.23.10 - UNRELEASED
 
   OPTIMIZATIONS
 
+HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
+
   BUG FIXES
 
 Release 0.23.9 - 2013-07-08




svn commit: r1541743 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/had

2013-11-13 Thread daryn
Author: daryn
Date: Wed Nov 13 21:48:21 2013
New Revision: 1541743

URL: http://svn.apache.org/r1541743
Log:
svn merge -c 1541736 FIXES: HADOOP-9956. RPC listener inefficiently assigns 
connections to readers (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1541743&r1=1541742&r2=1541743&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Nov 13 21:48:21 2013
@@ -2038,6 +2038,8 @@ Release 0.23.10 - UNRELEASED
 
   OPTIMIZATIONS
 
+HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
+
   BUG FIXES
 
 Release 0.23.9 - 2013-07-08

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1541743&r1=1541742&r2=1541743&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 Wed Nov 13 21:48:21 2013
@@ -65,6 +65,13 @@ public class CommonConfigurationKeys ext
   /** Default value for IPC_SERVER_RPC_READ_THREADS_KEY */
   public static final int IPC_SERVER_RPC_READ_THREADS_DEFAULT = 1;
   
+  /** Number of pending connections that may be queued per socket reader */
+  public static final String IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE_KEY =
+      "ipc.server.read.connection-queue.size";
+  /** Default value for IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE */
+  public static final int IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE_DEFAULT =
+  100;
+  
   public static final String IPC_MAXIMUM_DATA_LENGTH =
      "ipc.maximum.data.length";
   

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1541743&r1=1541742&r2=1541743&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Wed Nov 13 21:48:21 2013
@@ -342,6 +342,7 @@ public abstract class Server {
   private int port;   // port we listen on
   private int handlerCount;   // number of handler threads
   private int readThreads;// number of read threads
+  private int readerPendingConnectionQueue; // number of connections to queue per read thread
   private Class<? extends Writable> rpcRequestClass;  // class used for deserializing the rpc request
   private int maxIdleTime;                  // the maximum idle time after
                                             // which a client may be disconnected
@@ -550,12 +551,14 @@ public abstract class Server {
 }
 
 private class Reader extends Thread {
-  private volatile boolean adding = false;
+  final private BlockingQueue<Connection> pendingConnections;
   private final Selector readSelector;
 
   Reader(String name) throws IOException {
 super(name);
 
+this.pendingConnections =
+new LinkedBlockingQueue<Connection>(readerPendingConnectionQueue);
 this.readSelector = Selector.open();
   }
   
@@ -577,10 +580,14 @@ public abstract class Server {
 while (running) {
   SelectionKey key = null;
   try {
+// consume as many connections as currently queued to avoid
+// unbridled acceptance of connections that starves the select
+int size

svn commit: r1541742 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/

2013-11-13 Thread daryn
Author: daryn
Date: Wed Nov 13 21:47:36 2013
New Revision: 1541742

URL: http://svn.apache.org/r1541742
Log:
HADOOP-9956. RPC listener inefficiently assigns connections to readers (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1541742&r1=1541741&r2=1541742&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Wed Nov 13 21:47:36 2013
@@ -20,6 +20,8 @@ Release 0.23.10 - UNRELEASED
 HADOOP-9476. Some test cases in TestUserGroupInformation fail if ran after
 testSetLoginUser. (Robert Parker via kihwal)
 
+HADOOP-9956. RPC listener inefficiently assigns connections to readers 
(daryn)
+
   BUG FIXES
 
 HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via 
daryn)

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1541742&r1=1541741&r2=1541742&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
 Wed Nov 13 21:47:36 2013
@@ -67,6 +67,12 @@ public class CommonConfigurationKeys ext
      "ipc.server.read.threadpool.size";
   /** Default value for IPC_SERVER_RPC_READ_THREADS_KEY */
   public static final int IPC_SERVER_RPC_READ_THREADS_DEFAULT = 1;
+  /** Number of pending connections that may be queued per socket reader */
+  public static final String IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE_KEY =
+      "ipc.server.read.connection-queue.size";
+  /** Default value for IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE */
+  public static final int IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE_DEFAULT =
+      100;
 
   /** How many calls per handler are allowed in the queue. */
   public static final String  IPC_SERVER_HANDLER_QUEUE_SIZE_KEY =

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1541742&r1=1541741&r2=1541742&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Wed Nov 13 21:47:36 2013
@@ -225,6 +225,7 @@ public abstract class Server {
   private int port;   // port we listen on
   private int handlerCount;   // number of handler threads
   private int readThreads;// number of read threads
+  private int readerPendingConnectionQueue; // number of connections to queue per read thread
   private Class<? extends Writable> paramClass;   // class of call parameters
   private int maxIdleTime;                  // the maximum idle time after
                                             // which a client may be disconnected
@@ -410,12 +411,14 @@ public abstract class Server {
 }
 
 private class Reader extends Thread {
-  private volatile boolean adding = false;
+  final private BlockingQueue<Connection> pendingConnections;
   private final Selector readSelector;
 
   Reader(String name) throws IOException {
 super(name);
 
+this.pendingConnections =
+new LinkedBlockingQueue<Connection>(readerPendingConnectionQueue);
 this.readSelector = Selector.open();
   }
   
@@ -436,10 +439,14 @@ public abstract class Server {
 while (running

svn commit: r1530667 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-10-09 Thread daryn
Author: daryn
Date: Wed Oct  9 15:09:37 2013
New Revision: 1530667

URL: http://svn.apache.org/r1530667
Log:
HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules (Ivan A. 
Veselovsky via daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1530667&r1=1530666&r2=1530667&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Oct 
 9 15:09:37 2013
@@ -351,6 +351,9 @@ Release 2.3.0 - UNRELEASED
 HADOOP-9199. Cover package org.apache.hadoop.io with unit tests (Andrey
 Klochkov via jeagles)
 
+HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules
+(Ivan A. Veselovsky via daryn)
+
   OPTIMIZATIONS
 
 HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)




svn commit: r1530671 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2013-10-09 Thread daryn
Author: daryn
Date: Wed Oct  9 15:10:43 2013
New Revision: 1530671

URL: http://svn.apache.org/r1530671
Log:
HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules (Ivan A. 
Veselovsky via daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1530671&r1=1530670&r2=1530671&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Oct  9 15:10:43 2013
@@ -64,6 +64,9 @@ Release 2.3.0 - UNRELEASED
 HADOOP-9199. Cover package org.apache.hadoop.io with unit tests (Andrey
 Klochkov via jeagles)
 
+HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules
+(Ivan A. Veselovsky via daryn)
+
   OPTIMIZATIONS
 
 HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)




svn commit: r1512380 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/security/ src/test/java/org/apache/hadoop/ipc/

2013-08-09 Thread daryn
Author: daryn
Date: Fri Aug  9 16:28:39 2013
New Revision: 1512380

URL: http://svn.apache.org/r1512380
Log:
HADOOP-9789. Support server advertised kerberos principals (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1512380&r1=1512379&r2=1512380&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Aug 
 9 16:28:39 2013
@@ -338,6 +338,8 @@ Release 2.1.1-beta - UNRELEASED
 
 HADOOP-9672. Upgrade Avro dependency to 1.7.4. (sandy via kihwal)
 
+HADOOP-9789. Support server advertised kerberos principals (daryn)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1512380&r1=1512379&r2=1512380&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 Fri Aug  9 16:28:39 2013
@@ -33,6 +33,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.regex.Pattern;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -51,6 +52,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcRequestMessageWrapper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcResponseMessageWrapper;
 import org.apache.hadoop.ipc.RPC.RpcKind;
@@ -280,9 +282,8 @@ public class SaslRpcClient {
* @return String of the server's principal
* @throws IOException - error determining configured principal
*/
-
-  // try to get the configured principal for the remote server
-  private String getServerPrincipal(SaslAuth authType) throws IOException {
+  @VisibleForTesting
+  String getServerPrincipal(SaslAuth authType) throws IOException {
 KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
 LOG.debug("Get kerberos info proto:"+protocol+" info:"+krbInfo);
 if (krbInfo == null) { // protocol has no support for kerberos
@@ -294,28 +295,37 @@ public class SaslRpcClient {
   "Can't obtain server Kerberos config key from protocol="
   + protocol.getCanonicalName());
 }
-// construct the expected principal from the config
-String confPrincipal = SecurityUtil.getServerPrincipal(
-conf.get(serverKey), serverAddr.getAddress());
-if (confPrincipal == null || confPrincipal.isEmpty()) {
-  throw new IllegalArgumentException(
-  "Failed to specify server's Kerberos principal name");
-}
-// ensure it looks like a host-based service principal
-KerberosName name = new KerberosName(confPrincipal);
-if (name.getHostName() == null) {
-  throw new IllegalArgumentException(
-  "Kerberos principal name does NOT have the expected hostname part: "
-  + confPrincipal);
+// construct server advertised principal for comparison
+String serverPrincipal = new KerberosPrincipal(
+authType.getProtocol() + "/" + authType.getServerId()).getName();
+boolean isPrincipalValid = false;
+
+// use the pattern if defined
+String serverKeyPattern = conf.get(serverKey + ".pattern");
+if (serverKeyPattern != null && !serverKeyPattern.isEmpty()) {
+  Pattern pattern = GlobPattern.compile(serverKeyPattern);
+  isPrincipalValid = pattern.matcher(serverPrincipal).matches();
+} else {
+  // check that the server advertised principal matches our conf
+  String confPrincipal = SecurityUtil.getServerPrincipal(
+  conf.get(serverKey), serverAddr.getAddress());
+  if (confPrincipal == null || confPrincipal.isEmpty()) {
+throw new

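The essence of the new validation path in the diff above: rebuild the
principal the server advertised during SASL negotiation and, when a
"<serverKey>.pattern" property is configured, match it with a glob instead
of requiring an exact configured principal. A hedged Java sketch
(matchesPattern and its parameters are illustrative; constructing a
KerberosPrincipal without a realm requires a resolvable default realm):

    import java.util.regex.Pattern;

    import javax.security.auth.kerberos.KerberosPrincipal;

    import org.apache.hadoop.fs.GlobPattern;

    public class PrincipalPatternSketch {
      // True if the server-advertised principal matches the configured
      // glob, mirroring the pattern branch added above.
      static boolean matchesPattern(String saslProtocol, String serverId,
                                    String glob) {
        String serverPrincipal =
            new KerberosPrincipal(saslProtocol + "/" + serverId).getName();
        Pattern pattern = GlobPattern.compile(glob);
        return pattern.matcher(serverPrincipal).matches();
      }
    }
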
svn commit: r1512381 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/security/ src/test/java/org/apache/hadoop/ipc/

2013-08-09 Thread daryn
Author: daryn
Date: Fri Aug  9 16:30:10 2013
New Revision: 1512381

URL: http://svn.apache.org/r1512381
Log:
merge -c 1512380 FIXES: HADOOP-9789.  Support server advertised kerberos 
principals (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1512381&r1=1512380&r2=1512381&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Fri Aug  9 16:30:10 2013
@@ -65,6 +65,8 @@ Release 2.1.1-beta - UNRELEASED
 
 HADOOP-9672. Upgrade Avro dependency to 1.7.4. (sandy via kihwal)
 
+HADOOP-9789. Support server advertised kerberos principals (daryn)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1512381&r1=1512380&r2=1512381&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 Fri Aug  9 16:30:10 2013
@@ -33,6 +33,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.regex.Pattern;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -51,6 +52,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcRequestMessageWrapper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcResponseMessageWrapper;
 import org.apache.hadoop.ipc.RPC.RpcKind;
@@ -280,9 +282,8 @@ public class SaslRpcClient {
* @return String of the server's principal
* @throws IOException - error determining configured principal
*/
-
-  // try to get the configured principal for the remote server
-  private String getServerPrincipal(SaslAuth authType) throws IOException {
+  @VisibleForTesting
+  String getServerPrincipal(SaslAuth authType) throws IOException {
 KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
 LOG.debug("Get kerberos info proto:"+protocol+" info:"+krbInfo);
 if (krbInfo == null) { // protocol has no support for kerberos
@@ -294,28 +295,37 @@ public class SaslRpcClient {
   "Can't obtain server Kerberos config key from protocol="
   + protocol.getCanonicalName());
 }
-// construct the expected principal from the config
-String confPrincipal = SecurityUtil.getServerPrincipal(
-conf.get(serverKey), serverAddr.getAddress());
-if (confPrincipal == null || confPrincipal.isEmpty()) {
-  throw new IllegalArgumentException(
-  "Failed to specify server's Kerberos principal name");
-}
-// ensure it looks like a host-based service principal
-KerberosName name = new KerberosName(confPrincipal);
-if (name.getHostName() == null) {
-  throw new IllegalArgumentException(
-  "Kerberos principal name does NOT have the expected hostname part: "
-  + confPrincipal);
+// construct server advertised principal for comparison
+String serverPrincipal = new KerberosPrincipal(
+authType.getProtocol() + "/" + authType.getServerId()).getName();
+boolean isPrincipalValid = false;
+
+// use the pattern if defined
+String serverKeyPattern = conf.get(serverKey + ".pattern");
+if (serverKeyPattern != null && !serverKeyPattern.isEmpty()) {
+  Pattern pattern = GlobPattern.compile(serverKeyPattern);
+  isPrincipalValid = pattern.matcher(serverPrincipal).matches();
+} else {
+  // check that the server advertised principal matches our conf
+  String confPrincipal

svn commit: r1512382 - in /hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/security/ src/test/java/org/apache/hadoop/ipc/

2013-08-09 Thread daryn
Author: daryn
Date: Fri Aug  9 16:32:29 2013
New Revision: 1512382

URL: http://svn.apache.org/r1512382
Log:
merge -c 1512381 FIXES: HADOOP-9789.  Support server advertised kerberos 
principals (daryn)

Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1512382&r1=1512381&r2=1512382&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Fri Aug  9 16:32:29 2013
@@ -53,6 +53,8 @@ Release 2.1.0-beta - 2013-08-06
 HADOOP-9821. ClientId should have getMsb/getLsb methods. 
 (Tsuyoshi OZAWA via jing9)
 
+HADOOP-9789. Support server advertised kerberos principals (daryn)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1512382&r1=1512381&r2=1512382&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 Fri Aug  9 16:32:29 2013
@@ -33,6 +33,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.regex.Pattern;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -51,6 +52,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcRequestMessageWrapper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcResponseMessageWrapper;
 import org.apache.hadoop.ipc.RPC.RpcKind;
@@ -280,9 +282,8 @@ public class SaslRpcClient {
* @return String of the server's principal
* @throws IOException - error determining configured principal
*/
-
-  // try to get the configured principal for the remote server
-  private String getServerPrincipal(SaslAuth authType) throws IOException {
+  @VisibleForTesting
+  String getServerPrincipal(SaslAuth authType) throws IOException {
 KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
 LOG.debug("Get kerberos info proto:"+protocol+" info:"+krbInfo);
 if (krbInfo == null) { // protocol has no support for kerberos
@@ -294,28 +295,37 @@ public class SaslRpcClient {
   "Can't obtain server Kerberos config key from protocol="
   + protocol.getCanonicalName());
 }
-// construct the expected principal from the config
-String confPrincipal = SecurityUtil.getServerPrincipal(
-conf.get(serverKey), serverAddr.getAddress());
-if (confPrincipal == null || confPrincipal.isEmpty()) {
-  throw new IllegalArgumentException(
-  "Failed to specify server's Kerberos principal name");
-}
-// ensure it looks like a host-based service principal
-KerberosName name = new KerberosName(confPrincipal);
-if (name.getHostName() == null) {
-  throw new IllegalArgumentException(
-  "Kerberos principal name does NOT have the expected hostname part: "
-  + confPrincipal);
+// construct server advertised principal for comparison
+String serverPrincipal = new KerberosPrincipal(
+authType.getProtocol() + "/" + authType.getServerId()).getName();
+boolean isPrincipalValid = false;
+
+// use the pattern if defined
+String serverKeyPattern = conf.get(serverKey + ".pattern");
+if (serverKeyPattern != null && !serverKeyPattern.isEmpty()) {
+  Pattern pattern = GlobPattern.compile(serverKeyPattern);
+  isPrincipalValid = pattern.matcher(serverPrincipal).matches();
+} else

svn commit: r1512465 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/HarFileSystem.java src/test/java/org/apache/hadoop/fs/TestHarFileSyst

2013-08-09 Thread daryn
Author: daryn
Date: Fri Aug  9 19:41:59 2013
New Revision: 1512465

URL: http://svn.apache.org/r1512465
Log:
HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1512465&r1=1512464&r2=1512465&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Aug 
 9 19:41:59 2013
@@ -361,6 +361,8 @@ Release 2.1.1-beta - UNRELEASED
 HADOOP-9675. use svn:eol-style native for html to prevent line ending
 issues (Colin Patrick McCabe)
 
+HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via 
daryn)
+
 Release 2.1.0-beta - 2013-08-06
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1512465&r1=1512464&r2=1512465&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Fri Aug  9 19:41:59 2013
@@ -24,11 +24,12 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -56,10 +57,12 @@ public class HarFileSystem extends Filte
 
   private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
 
+  public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries";
+  public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10;
+
   public static final int VERSION = 3;
 
-  private static final Map<URI, HarMetaData> harMetaCache =
-      new ConcurrentHashMap<URI, HarMetaData>();
+  private static Map<URI, HarMetaData> harMetaCache;
 
   // uri representation of this Har filesystem
   private URI uri;
@@ -98,7 +101,14 @@ public class HarFileSystem extends Filte
   public HarFileSystem(FileSystem fs) {
 super(fs);
   }
-  
+ 
+  private synchronized void initializeMetadataCache(Configuration conf) {
+if (harMetaCache == null) {
+  int cacheSize = conf.getInt(METADATA_CACHE_ENTRIES_KEY, METADATA_CACHE_ENTRIES_DEFAULT);
+  harMetaCache = Collections.synchronizedMap(new LruCache<URI, HarMetaData>(cacheSize));
+}
+  }
+ 
   /**
* Initialize a Har filesystem per har archive. The 
* archive home directory is the top level directory
@@ -114,6 +124,9 @@ public class HarFileSystem extends Filte
*/
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
+// initialize the metadata cache, if needed
+initializeMetadataCache(conf);
+
 // decode the name
 URI underLyingURI = decodeHarURI(name, conf);
 // we got the right har Path- now check if this is 
@@ -1117,4 +1130,18 @@ public class HarFileSystem extends Filte
   HarMetaData getMetadata() {
 return metadata;
   }
+
+  private static class LruCache<K, V> extends LinkedHashMap<K, V> {
+private final int MAX_ENTRIES;
+
+public LruCache(int maxEntries) {
+super(maxEntries + 1, 1.0f, true);
+MAX_ENTRIES = maxEntries;
+}
+
+@Override
+protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+return size() > MAX_ENTRIES;
+}
+  }
 }

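The eviction mechanics of the LruCache above, in isolation: a LinkedHashMap
constructed with accessOrder=true reorders entries on get(), and
removeEldestEntry() is consulted after every put(). A runnable Java demo
(class and key names here are illustrative, not Hadoop code):

    import java.util.Collections;
    import java.util.LinkedHashMap;
    import java.util.Map;

    class LruCache<K, V> extends LinkedHashMap<K, V> {
      private final int maxEntries;

      LruCache(int maxEntries) {
        super(maxEntries + 1, 1.0f, true);  // true = access order
        this.maxEntries = maxEntries;
      }

      @Override
      protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > maxEntries;
      }
    }

    class LruCacheDemo {
      public static void main(String[] args) {
        // Wrap in a synchronized view, as HarFileSystem does for its cache.
        Map<String, String> cache =
            Collections.synchronizedMap(new LruCache<String, String>(2));
        cache.put("a", "1");
        cache.put("b", "2");
        cache.get("a");       // touch "a" so "b" becomes the eldest entry
        cache.put("c", "3");  // evicts "b"
        System.out.println(cache.keySet()); // prints [a, c]
      }
    }
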
Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java?rev=1512465&r1=1512464&r2=1512465&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common

svn commit: r1512466 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/HarFileSystem.java src/test/java/org/apache/hadoop/fs/Tes

2013-08-09 Thread daryn
Author: daryn
Date: Fri Aug  9 19:44:50 2013
New Revision: 1512466

URL: http://svn.apache.org/r1512466
Log:
merge -c 1512465 FIXES: HADOOP-9757. Har metadata cache can grow without limit 
(Cristina Abad via daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1512466&r1=1512465&r2=1512466&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Fri Aug  9 19:44:50 2013
@@ -91,6 +91,8 @@ Release 2.1.1-beta - UNRELEASED
 HADOOP-9675. use svn:eol-style native for html to prevent line ending
 issues (Colin Patrick McCabe)
 
+HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via 
daryn)
+
 Release 2.1.0-beta - 2013-08-06
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1512466&r1=1512465&r2=1512466&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Fri Aug  9 19:44:50 2013
@@ -24,11 +24,12 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -56,10 +57,12 @@ public class HarFileSystem extends Filte
 
   private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
 
+  public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries";
+  public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10;
+
   public static final int VERSION = 3;
 
-  private static final Map<URI, HarMetaData> harMetaCache =
-      new ConcurrentHashMap<URI, HarMetaData>();
+  private static Map<URI, HarMetaData> harMetaCache;
 
   // uri representation of this Har filesystem
   private URI uri;
@@ -98,7 +101,14 @@ public class HarFileSystem extends Filte
   public HarFileSystem(FileSystem fs) {
 super(fs);
   }
-  
+ 
+  private synchronized void initializeMetadataCache(Configuration conf) {
+if (harMetaCache == null) {
+  int cacheSize = conf.getInt(METADATA_CACHE_ENTRIES_KEY, METADATA_CACHE_ENTRIES_DEFAULT);
+  harMetaCache = Collections.synchronizedMap(new LruCache<URI, HarMetaData>(cacheSize));
+}
+  }
+ 
   /**
* Initialize a Har filesystem per har archive. The 
* archive home directory is the top level directory
@@ -114,6 +124,9 @@ public class HarFileSystem extends Filte
*/
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
+// initialize the metadata cache, if needed
+initializeMetadataCache(conf);
+
 // decode the name
 URI underLyingURI = decodeHarURI(name, conf);
 // we got the right har Path- now check if this is 
@@ -1117,4 +1130,18 @@ public class HarFileSystem extends Filte
   HarMetaData getMetadata() {
 return metadata;
   }
+
+  private static class LruCache<K, V> extends LinkedHashMap<K, V> {
+private final int MAX_ENTRIES;
+
+public LruCache(int maxEntries) {
+super(maxEntries + 1, 1.0f, true);
+MAX_ENTRIES = maxEntries;
+}
+
+@Override
+protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+return size() > MAX_ENTRIES;
+}
+  }
 }

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java?rev=1512466&r1=1512465&r2=1512466&view=diff
==
--- 
hadoop

svn commit: r1512468 - in /hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/HarFileSystem.java src/test/java/org/apache/hadoop

2013-08-09 Thread daryn
Author: daryn
Date: Fri Aug  9 19:46:26 2013
New Revision: 1512468

URL: http://svn.apache.org/r1512468
Log:
merge -c 1512466 FIXES: HADOOP-9757. Har metadata cache can grow without limit 
(Cristina Abad via daryn)

Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1512468&r1=1512467&r2=1512468&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Fri Aug  9 19:46:26 2013
@@ -65,6 +65,8 @@ Release 2.1.0-beta - 2013-08-06
 HADOOP-9768. chown and chgrp reject users and groups with spaces on 
platforms
 where spaces are otherwise acceptable. (cnauroth)
 
+HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via 
daryn)
+
 Release 2.1.0-beta - 2013-08-06
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1512468&r1=1512467&r2=1512468&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Fri Aug  9 19:46:26 2013
@@ -24,11 +24,12 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -56,10 +57,12 @@ public class HarFileSystem extends Filte
 
   private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
 
+  public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries";
+  public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10;
+
   public static final int VERSION = 3;
 
-  private static final Map<URI, HarMetaData> harMetaCache =
-      new ConcurrentHashMap<URI, HarMetaData>();
+  private static Map<URI, HarMetaData> harMetaCache;
 
   // uri representation of this Har filesystem
   private URI uri;
@@ -98,7 +101,14 @@ public class HarFileSystem extends Filte
   public HarFileSystem(FileSystem fs) {
 super(fs);
   }
-  
+ 
+  private synchronized void initializeMetadataCache(Configuration conf) {
+if (harMetaCache == null) {
+  int cacheSize = conf.getInt(METADATA_CACHE_ENTRIES_KEY, METADATA_CACHE_ENTRIES_DEFAULT);
+  harMetaCache = Collections.synchronizedMap(new LruCache<URI, HarMetaData>(cacheSize));
+}
+  }
+ 
   /**
* Initialize a Har filesystem per har archive. The 
* archive home directory is the top level directory
@@ -114,6 +124,9 @@ public class HarFileSystem extends Filte
*/
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
+// initialize the metadata cache, if needed
+initializeMetadataCache(conf);
+
 // decode the name
 URI underLyingURI = decodeHarURI(name, conf);
 // we got the right har Path- now check if this is 
@@ -1105,4 +1118,18 @@ public class HarFileSystem extends Filte
   HarMetaData getMetadata() {
 return metadata;
   }
+
+  private static class LruCache<K, V> extends LinkedHashMap<K, V> {
+private final int MAX_ENTRIES;
+
+public LruCache(int maxEntries) {
+super(maxEntries + 1, 1.0f, true);
+MAX_ENTRIES = maxEntries;
+}
+
+@Override
+protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+return size() > MAX_ENTRIES;
+}
+  }
 }

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java?rev=1512468&r1

svn commit: r1512470 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/HarFileSystem.java src/test/java/org/apache/hadoop/fs/

2013-08-09 Thread daryn
Author: daryn
Date: Fri Aug  9 19:52:20 2013
New Revision: 1512470

URL: http://svn.apache.org/r1512470
Log:
HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1512470&r1=1512469&r2=1512470&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Fri Aug  9 19:52:20 2013
@@ -17,6 +17,8 @@ Release 0.23.10 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via 
daryn)
+
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1512470&r1=1512469&r2=1512470&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Fri Aug  9 19:52:20 2013
@@ -24,11 +24,12 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -56,10 +57,12 @@ public class HarFileSystem extends Filte
 
   private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
 
+  public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries";
+  public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10;
+
   public static final int VERSION = 3;
 
-  private static final Map<URI, HarMetaData> harMetaCache =
-      new ConcurrentHashMap<URI, HarMetaData>();
+  private static Map<URI, HarMetaData> harMetaCache;
 
   // uri representation of this Har filesystem
   private URI uri;
@@ -87,7 +90,14 @@ public class HarFileSystem extends Filte
   public HarFileSystem(FileSystem fs) {
 super(fs);
   }
-  
+ 
+  private synchronized void initializeMetadataCache(Configuration conf) {
+if (harMetaCache == null) {
+  int cacheSize = conf.getInt(METADATA_CACHE_ENTRIES_KEY, METADATA_CACHE_ENTRIES_DEFAULT);
+  harMetaCache = Collections.synchronizedMap(new LruCache<URI, HarMetaData>(cacheSize));
+}
+  }
+ 
   /**
* Initialize a Har filesystem per har archive. The 
* archive home directory is the top level directory
@@ -102,6 +112,9 @@ public class HarFileSystem extends Filte
* to be used in case not specified.
*/
   public void initialize(URI name, Configuration conf) throws IOException {
+// initialize the metadata cache, if needed
+initializeMetadataCache(conf);
+
 // decode the name
 URI underLyingURI = decodeHarURI(name, conf);
 // we got the right har Path- now check if this is 
@@ -1066,4 +1079,18 @@ public class HarFileSystem extends Filte
   HarMetaData getMetadata() {
 return metadata;
   }
+
+  private static class LruCache<K, V> extends LinkedHashMap<K, V> {
+private final int MAX_ENTRIES;
+
+public LruCache(int maxEntries) {
+super(maxEntries + 1, 1.0f, true);
+MAX_ENTRIES = maxEntries;
+}
+
+@Override
+protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+return size() > MAX_ENTRIES;
+}
+  }
 }

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java?rev=1512470&r1=1512469&r2=1512470&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common

svn commit: r1510772 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/security/ src/test/java/org/apache/hadoop/ip

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 22:02:40 2013
New Revision: 1510772

URL: http://svn.apache.org/r1510772
Log:
HADOOP-9816. RPC Sasl QOP is broken (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510772&r1=1510771&r2=1510772&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Aug 
 5 22:02:40 2013
@@ -686,6 +686,8 @@ Release 2.1.0-beta - 2013-08-06
 HADOOP-9507. LocalFileSystem rename() is broken in some cases when
 destination exists. (cnauroth)
 
+HADOOP-9816. RPC Sasl QOP is broken (daryn)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
 
 HADOOP-8924. Hadoop Common creating package-info.java must not depend on

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510772&r1=1510771&r2=1510772&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 22:02:40 2013
@@ -52,6 +52,7 @@ import java.util.concurrent.atomic.Atomi
 import java.util.concurrent.atomic.AtomicLong;
 
 import javax.net.SocketFactory;
+import javax.security.sasl.Sasl;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -87,6 +88,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.CodedOutputStream;
@@ -711,6 +713,9 @@ public class Client {
   // Sasl connect is successful. Let's set up Sasl i/o streams.
   inStream = saslRpcClient.getInputStream(inStream);
   outStream = saslRpcClient.getOutputStream(outStream);
+  // for testing
+  remoteId.saslQop =
+  (String)saslRpcClient.getNegotiatedProperty(Sasl.QOP);
        } else if (UserGroupInformation.isSecurityEnabled() &&
            !fallbackAllowed) {
          throw new IOException("Server asks us to fall back to SIMPLE " +
@@ -1455,6 +1460,7 @@ public class Client {
 private final boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private final boolean doPing; //do we need to send ping message
 private final int pingInterval; // how often sends ping to the server in 
msecs
+private String saslQop; // here for testing
 
    ConnectionId(InetSocketAddress address, Class<?> protocol,
                 UserGroupInformation ticket, int rpcTimeout, int maxIdleTime,
@@ -1509,6 +1515,11 @@ public class Client {
   return pingInterval;
 }
 
+@VisibleForTesting
+String getSaslQop() {
+  return saslQop;
+}
+
    static ConnectionId getConnectionId(InetSocketAddress addr,
        Class<?> protocol, UserGroupInformation ticket, int rpcTimeout,
        Configuration conf) throws IOException {
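
The saslQop captured above comes straight from the JDK's SASL layer once
negotiation completes. A minimal sketch of reading it, assuming any
connected SaslClient (the helper class is illustrative, not Hadoop API):

    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslClient;

    final class SaslQopUtil {
      // Typical values: "auth", "auth-int", or "auth-conf".
      static String negotiatedQop(SaslClient saslClient) {
        return (String) saslClient.getNegotiatedProperty(Sasl.QOP);
      }
    }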

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1510772&r1=1510771&r2=1510772&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Mon Aug  5 22:02:40 2013
@@ -1276,8 +1276,8 @@ public abstract class Server {
   }
 }
 
-private RpcSaslProto saslReadAndProcess(DataInputStream dis) throws
-WrappedRpcServerException

svn commit: r1510774 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/security/ src/test/java/org/apac

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 22:05:25 2013
New Revision: 1510774

URL: http://svn.apache.org/r1510774
Log:
merge -c 1510772 FIXES: HADOOP-9816.  RPC Sasl QOP is broken (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510774&r1=1510773&r2=1510774&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Mon Aug  5 22:05:25 2013
@@ -419,6 +419,8 @@ Release 2.1.0-beta - 2013-08-06
 HADOOP-9507. LocalFileSystem rename() is broken in some cases when
 destination exists. (cnauroth)
 
+HADOOP-9816. RPC Sasl QOP is broken (daryn)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
 
 HADOOP-8924. Hadoop Common creating package-info.java must not depend on

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510774&r1=1510773&r2=1510774&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 22:05:25 2013
@@ -50,6 +50,7 @@ import java.util.concurrent.atomic.Atomi
 import java.util.concurrent.atomic.AtomicLong;
 
 import javax.net.SocketFactory;
+import javax.security.sasl.Sasl;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -85,6 +86,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.CodedOutputStream;
@@ -709,6 +711,9 @@ public class Client {
   // Sasl connect is successful. Let's set up Sasl i/o streams.
   inStream = saslRpcClient.getInputStream(inStream);
   outStream = saslRpcClient.getOutputStream(outStream);
+  // for testing
+  remoteId.saslQop =
+  (String)saslRpcClient.getNegotiatedProperty(Sasl.QOP);
        } else if (UserGroupInformation.isSecurityEnabled() &&
            !fallbackAllowed) {
          throw new IOException("Server asks us to fall back to SIMPLE " +
@@ -1453,6 +1458,7 @@ public class Client {
 private final boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private final boolean doPing; //do we need to send ping message
 private final int pingInterval; // how often sends ping to the server in 
msecs
+private String saslQop; // here for testing
 
    ConnectionId(InetSocketAddress address, Class<?> protocol,
                 UserGroupInformation ticket, int rpcTimeout, int maxIdleTime,
@@ -1507,6 +1513,11 @@ public class Client {
   return pingInterval;
 }
 
+@VisibleForTesting
+String getSaslQop() {
+  return saslQop;
+}
+
    static ConnectionId getConnectionId(InetSocketAddress addr,
        Class<?> protocol, UserGroupInformation ticket, int rpcTimeout,
        Configuration conf) throws IOException {

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1510774&r1=1510773&r2=1510774&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org

svn commit: r1510783 - in /hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/security/ src/test/java/o

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 22:13:42 2013
New Revision: 1510783

URL: http://svn.apache.org/r1510783
Log:
merge -c 1510774 FIXES: HADOOP-9816.  RPC Sasl QOP is broken (daryn)

Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510783&r1=1510782&r2=1510783&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Mon Aug  5 22:13:42 2013
@@ -393,6 +393,8 @@ Release 2.1.0-beta - 2013-08-06
 HADOOP-9507. LocalFileSystem rename() is broken in some cases when
 destination exists. (cnauroth)
 
+HADOOP-9816. RPC Sasl QOP is broken (daryn)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
 
 HADOOP-8924. Hadoop Common creating package-info.java must not depend on

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510783&r1=1510782&r2=1510783&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 22:13:42 2013
@@ -50,6 +50,7 @@ import java.util.concurrent.atomic.Atomi
 import java.util.concurrent.atomic.AtomicLong;
 
 import javax.net.SocketFactory;
+import javax.security.sasl.Sasl;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -85,6 +86,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.CodedOutputStream;
@@ -656,6 +658,9 @@ public class Client {
   // Sasl connect is successful. Let's set up Sasl i/o streams.
   inStream = saslRpcClient.getInputStream(inStream);
   outStream = saslRpcClient.getOutputStream(outStream);
+  // for testing
+  remoteId.saslQop =
+  (String)saslRpcClient.getNegotiatedProperty(Sasl.QOP);
        } else if (UserGroupInformation.isSecurityEnabled() &&
            !fallbackAllowed) {
          throw new IOException("Server asks us to fall back to SIMPLE " +
@@ -1396,6 +1401,7 @@ public class Client {
 private final boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private final boolean doPing; //do we need to send ping message
 private final int pingInterval; // how often sends ping to the server in 
msecs
+private String saslQop; // here for testing
 
    ConnectionId(InetSocketAddress address, Class<?> protocol,
                 UserGroupInformation ticket, int rpcTimeout, int maxIdleTime,
@@ -1450,6 +1456,11 @@ public class Client {
   return pingInterval;
 }
 
+@VisibleForTesting
+String getSaslQop() {
+  return saslQop;
+}
+
    static ConnectionId getConnectionId(InetSocketAddress addr,
        Class<?> protocol, UserGroupInformation ticket, int rpcTimeout,
        Configuration conf) throws IOException {

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1510783&r1=1510782&r2=1510783&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc

svn commit: r1510784 - in /hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/security/ src/test/java

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 22:16:06 2013
New Revision: 1510784

URL: http://svn.apache.org/r1510784
Log:
merge -c 1510783 FIXES: HADOOP-9816.  RPC Sasl QOP is broken (daryn)

Modified:

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510784&r1=1510783&r2=1510784&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Mon Aug  5 22:16:06 2013
@@ -363,6 +363,8 @@ Release 2.1.0-beta - 2013-08-06
 HADOOP-9507. LocalFileSystem rename() is broken in some cases when
 destination exists. (cnauroth)
 
+HADOOP-9816. RPC Sasl QOP is broken (daryn)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
 
 HADOOP-8924. Hadoop Common creating package-info.java must not depend on

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510784&r1=1510783&r2=1510784&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 22:16:06 2013
@@ -50,6 +50,7 @@ import java.util.concurrent.atomic.Atomi
 import java.util.concurrent.atomic.AtomicLong;
 
 import javax.net.SocketFactory;
+import javax.security.sasl.Sasl;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -85,6 +86,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.CodedOutputStream;
@@ -656,6 +658,9 @@ public class Client {
   // Sasl connect is successful. Let's set up Sasl i/o streams.
   inStream = saslRpcClient.getInputStream(inStream);
   outStream = saslRpcClient.getOutputStream(outStream);
+  // for testing
+  remoteId.saslQop =
+  (String)saslRpcClient.getNegotiatedProperty(Sasl.QOP);
        } else if (UserGroupInformation.isSecurityEnabled() &&
            !fallbackAllowed) {
          throw new IOException("Server asks us to fall back to SIMPLE " +
@@ -1396,6 +1401,7 @@ public class Client {
 private final boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private final boolean doPing; //do we need to send ping message
 private final int pingInterval; // how often sends ping to the server in 
msecs
+private String saslQop; // here for testing
 
    ConnectionId(InetSocketAddress address, Class<?> protocol,
                 UserGroupInformation ticket, int rpcTimeout, int maxIdleTime,
@@ -1450,6 +1456,11 @@ public class Client {
   return pingInterval;
 }
 
+@VisibleForTesting
+String getSaslQop() {
+  return saslQop;
+}
+
    static ConnectionId getConnectionId(InetSocketAddress addr,
        Class<?> protocol, UserGroupInformation ticket, int rpcTimeout,
        Configuration conf) throws IOException {

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1510784&r1=1510783&r2=1510784&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main

svn commit: r1510793 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/ipc/ test/java/org/apache/hadoop/ipc/

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 23:01:27 2013
New Revision: 1510793

URL: http://svn.apache.org/r1510793
Log:
HADOOP-9832. Add RPC header to client ping (daryn)


Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510793&r1=1510792&r2=1510793&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 23:01:27 2013
@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.ipc;
 
-import static org.apache.hadoop.ipc.RpcConstants.CONNECTION_CONTEXT_CALL_ID;
+import static org.apache.hadoop.ipc.RpcConstants.*;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.FilterInputStream;
@@ -382,6 +383,7 @@ public class Client {
 private boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private boolean doPing; //do we need to send ping message
 private int pingInterval; // how often sends ping to the server in msecs
+private ByteArrayOutputStream pingRequest; // ping message
 
 // currently active calls
    private Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
@@ -407,6 +409,15 @@ public class Client {
   this.maxRetriesOnSocketTimeouts = 
remoteId.getMaxRetriesOnSocketTimeouts();
   this.tcpNoDelay = remoteId.getTcpNoDelay();
   this.doPing = remoteId.getDoPing();
+  if (doPing) {
+// construct a RPC header with the callId as the ping callId
+pingRequest = new ByteArrayOutputStream();
+RpcRequestHeaderProto pingHeader = ProtoUtil
+.makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
+OperationProto.RPC_FINAL_PACKET, PING_CALL_ID,
+RpcConstants.INVALID_RETRY_COUNT, clientId);
+pingHeader.writeDelimitedTo(pingRequest);
+  }
   this.pingInterval = remoteId.getPingInterval();
   this.serviceClass = serviceClass;
   if (LOG.isDebugEnabled()) {
@@ -910,7 +921,8 @@ public class Client {
      if ( curTime - lastActivity.get() >= pingInterval) {
 lastActivity.set(curTime);
 synchronized (out) {
-  out.writeInt(RpcConstants.PING_CALL_ID);
+  out.writeInt(pingRequest.size());
+  pingRequest.writeTo(out);
   out.flush();
 }
   }
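
The net effect of the hunks above: a ping is no longer a bare sentinel int
but a length-prefixed, pre-serialized RPC request, built once per connection
and replayed on every idle interval. A minimal framing sketch, with a plain
byte[] standing in for the delimited RpcRequestHeaderProto (class and method
names are illustrative):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    final class PingFramer {
      private final ByteArrayOutputStream pingRequest = new ByteArrayOutputStream();

      PingFramer(byte[] serializedPingHeader) {
        // Built once; the same bytes are replayed on each ping.
        pingRequest.write(serializedPingHeader, 0, serializedPingHeader.length);
      }

      synchronized void sendPing(DataOutputStream out) throws IOException {
        out.writeInt(pingRequest.size());  // 4-byte length prefix
        pingRequest.writeTo(out);          // then the serialized header
        out.flush();
      }
    }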

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java?rev=1510793&r1=1510792&r2=1510793&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 Mon Aug  5 23:01:27 2013
@@ -27,13 +27,13 @@ public class RpcConstants {
 // Hidden Constructor
   }
   
-  public static final int PING_CALL_ID = -1;
+  public static final int AUTHORIZATION_FAILED_CALL_ID = -1;
+  public static final int INVALID_CALL_ID = -2;
+  public static final int CONNECTION_CONTEXT_CALL_ID = -3;
+  public static final int PING_CALL_ID = -4;
   
   public static final byte[] DUMMY_CLIENT_ID = new byte[0];
   
-  public static final int INVALID_CALL_ID = -2;
-
-  public static final int CONNECTION_CONTEXT_CALL_ID = -3;
   
   public static final int INVALID_RETRY_COUNT = -1;
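
All four reserved call ids above are negative, which keeps them disjoint
from real call ids. A hypothetical receiver-side guard (class and method
names are assumptions, not Hadoop's API):

    final class CallIds {
      // Reserved ids: AUTHORIZATION_FAILED_CALL_ID (-1), INVALID_CALL_ID (-2),
      // CONNECTION_CONTEXT_CALL_ID (-3), PING_CALL_ID (-4).
      static boolean isReservedCallId(int callId) {
        return callId < 0;
      }
    }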
   

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1510793&r1=1510792&r2=1510793&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop

svn commit: r1510796 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 23:03:16 2013
New Revision: 1510796

URL: http://svn.apache.org/r1510796
Log:
Update changes for HADOOP-9832.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510796&r1=1510795&r2=1510796&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Aug 
 5 23:03:16 2013
@@ -370,6 +370,8 @@ Release 2.1.0-beta - 2013-08-06
 
 HADOOP-9698. [RPC v9] Client must honor server's SASL negotiate response 
(daryn)
 
+HADOOP-9832. [RPC v9] Add RPC header to client ping (daryn)
+
   NEW FEATURES
 
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)




svn commit: r1510805 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 23:21:53 2013
New Revision: 1510805

URL: http://svn.apache.org/r1510805
Log:
HADOOP-9832. [RPC v9] Add RPC header to client ping (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510805&r1=1510804&r2=1510805&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Mon Aug  5 23:21:53 2013
@@ -97,6 +97,8 @@ Release 2.1.0-beta - 2013-08-06
 
 HADOOP-9698. [RPC v9] Client must honor server's SASL negotiate response 
(daryn)
 
+HADOOP-9832. [RPC v9] Add RPC header to client ping (daryn)
+
   NEW FEATURES
 
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510805&r1=1510804&r2=1510805&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 23:21:53 2013
@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.ipc;
 
+import static org.apache.hadoop.ipc.RpcConstants.*;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.FilterInputStream;
@@ -380,6 +383,7 @@ public class Client {
 private boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private boolean doPing; //do we need to send ping message
 private int pingInterval; // how often sends ping to the server in msecs
+private ByteArrayOutputStream pingRequest; // ping message
 
 // currently active calls
    private Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
@@ -405,6 +409,15 @@ public class Client {
   this.maxRetriesOnSocketTimeouts = 
remoteId.getMaxRetriesOnSocketTimeouts();
   this.tcpNoDelay = remoteId.getTcpNoDelay();
   this.doPing = remoteId.getDoPing();
+  if (doPing) {
+// construct a RPC header with the callId as the ping callId
+pingRequest = new ByteArrayOutputStream();
+RpcRequestHeaderProto pingHeader = ProtoUtil
+.makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
+OperationProto.RPC_FINAL_PACKET, PING_CALL_ID,
+RpcConstants.INVALID_RETRY_COUNT, clientId);
+pingHeader.writeDelimitedTo(pingRequest);
+  }
   this.pingInterval = remoteId.getPingInterval();
   this.serviceClass = serviceClass;
   if (LOG.isDebugEnabled()) {
@@ -908,7 +921,8 @@ public class Client {
      if ( curTime - lastActivity.get() >= pingInterval) {
 lastActivity.set(curTime);
 synchronized (out) {
-  out.writeInt(RpcConstants.PING_CALL_ID);
+  out.writeInt(pingRequest.size());
+  pingRequest.writeTo(out);
   out.flush();
 }
   }

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java?rev=1510805&r1=1510804&r2=1510805&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 Mon Aug  5 23:21:53 2013
@@ -27,12 +27,13 @@ public class RpcConstants {
 // Hidden Constructor
   }
   
-  public static final int PING_CALL_ID

svn commit: r1510808 - in /hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 23:31:03 2013
New Revision: 1510808

URL: http://svn.apache.org/r1510808
Log:
merge -c 1510805 FIXES: HADOOP-9832. [RPC v9] Add RPC header to client ping 
(daryn)

Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510808&r1=1510807&r2=1510808&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Mon Aug  5 23:31:03 2013
@@ -71,6 +71,8 @@ Release 2.1.0-beta - 2013-08-06
 
 HADOOP-9698. [RPC v9] Client must honor server's SASL negotiate response 
(daryn)
 
+HADOOP-9832. [RPC v9] Add RPC header to client ping (daryn)
+
   NEW FEATURES
 
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510808&r1=1510807&r2=1510808&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 23:31:03 2013
@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.ipc;
 
+import static org.apache.hadoop.ipc.RpcConstants.*;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.FilterInputStream;
@@ -327,6 +330,7 @@ public class Client {
 private boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private boolean doPing; //do we need to send ping message
 private int pingInterval; // how often sends ping to the server in msecs
+private ByteArrayOutputStream pingRequest; // ping message
 
 // currently active calls
    private Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
@@ -352,6 +356,15 @@ public class Client {
   this.maxRetriesOnSocketTimeouts = 
remoteId.getMaxRetriesOnSocketTimeouts();
   this.tcpNoDelay = remoteId.getTcpNoDelay();
   this.doPing = remoteId.getDoPing();
+  if (doPing) {
+// construct a RPC header with the callId as the ping callId
+pingRequest = new ByteArrayOutputStream();
+RpcRequestHeaderProto pingHeader = ProtoUtil
+.makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
+OperationProto.RPC_FINAL_PACKET, PING_CALL_ID,
+RpcConstants.INVALID_RETRY_COUNT, clientId);
+pingHeader.writeDelimitedTo(pingRequest);
+  }
   this.pingInterval = remoteId.getPingInterval();
   this.serviceClass = serviceClass;
   if (LOG.isDebugEnabled()) {
@@ -855,7 +868,8 @@ public class Client {
      if ( curTime - lastActivity.get() >= pingInterval) {
 lastActivity.set(curTime);
 synchronized (out) {
-  out.writeInt(RpcConstants.PING_CALL_ID);
+  out.writeInt(pingRequest.size());
+  pingRequest.writeTo(out);
   out.flush();
 }
   }

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java?rev=1510808&r1=1510807&r2=1510808&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 Mon

svn commit: r1510810 - in /hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-08-05 Thread daryn
Author: daryn
Date: Mon Aug  5 23:32:00 2013
New Revision: 1510810

URL: http://svn.apache.org/r1510810
Log:
merge -c 1510808 FIXES: HADOOP-9832. [RPC v9] Add RPC header to client ping 
(daryn)

Modified:

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510810&r1=1510809&r2=1510810&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Mon Aug  5 23:32:00 2013
@@ -41,6 +41,8 @@ Release 2.1.0-beta - 2013-08-06
 
 HADOOP-9698. [RPC v9] Client must honor server's SASL negotiate response 
(daryn)
 
+HADOOP-9832. [RPC v9] Add RPC header to client ping (daryn)
+
   NEW FEATURES
 
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1510810&r1=1510809&r2=1510810&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Aug  5 23:32:00 2013
@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.ipc;
 
+import static org.apache.hadoop.ipc.RpcConstants.*;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.FilterInputStream;
@@ -327,6 +330,7 @@ public class Client {
 private boolean tcpNoDelay; // if T then disable Nagle's Algorithm
 private boolean doPing; //do we need to send ping message
 private int pingInterval; // how often sends ping to the server in msecs
+private ByteArrayOutputStream pingRequest; // ping message
 
 // currently active calls
    private Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
@@ -352,6 +356,15 @@ public class Client {
   this.maxRetriesOnSocketTimeouts = 
remoteId.getMaxRetriesOnSocketTimeouts();
   this.tcpNoDelay = remoteId.getTcpNoDelay();
   this.doPing = remoteId.getDoPing();
+  if (doPing) {
+// construct a RPC header with the callId as the ping callId
+pingRequest = new ByteArrayOutputStream();
+RpcRequestHeaderProto pingHeader = ProtoUtil
+.makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
+OperationProto.RPC_FINAL_PACKET, PING_CALL_ID,
+RpcConstants.INVALID_RETRY_COUNT, clientId);
+pingHeader.writeDelimitedTo(pingRequest);
+  }
   this.pingInterval = remoteId.getPingInterval();
   this.serviceClass = serviceClass;
   if (LOG.isDebugEnabled()) {
@@ -855,7 +868,8 @@ public class Client {
      if ( curTime - lastActivity.get() >= pingInterval) {
 lastActivity.set(curTime);
 synchronized (out) {
-  out.writeInt(RpcConstants.PING_CALL_ID);
+  out.writeInt(pingRequest.size());
+  pingRequest.writeTo(out);
   out.flush();
 }
   }

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java?rev=1510810&r1=1510809&r2=1510810&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 (original)
+++ 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache

svn commit: r1504874 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/UserGroupInformation.java

2013-07-19 Thread daryn
Author: daryn
Date: Fri Jul 19 13:37:17 2013
New Revision: 1504874

URL: http://svn.apache.org/r1504874
Log:
HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1504874&r1=1504873&r2=1504874&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Jul 
19 13:37:17 2013
@@ -300,6 +300,8 @@ Release 2.3.0 - UNRELEASED
 
   OPTIMIZATIONS
 
+HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
+
   BUG FIXES
 
 HADOOP-9582. Non-existent file to hadoop fs -conf doesn't throw error

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1504874&r1=1504873&r2=1504874&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 Fri Jul 19 13:37:17 2013
@@ -209,9 +209,13 @@ public class UserGroupInformation {
* A method to initialize the fields that depend on a configuration.
* Must be called before useKerberos or groups is used.
*/
-  private static synchronized void ensureInitialized() {
+  private static void ensureInitialized() {
 if (conf == null) {
-  initialize(new Configuration(), false);
+  synchronized(UserGroupInformation.class) {
+if (conf == null) { // someone might have beat us
+  initialize(new Configuration(), false);
+}
+  }
 }
   }
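
The rewrite above is the classic double-checked locking idiom: a lock-free
fast path, then a re-check under the class lock so only one thread runs the
initialization. A generic sketch; note the textbook form declares the field
volatile for safe publication (names here are illustrative):

    final class LazySingleton {
      private static volatile Object instance;  // volatile for safe publication

      static Object get() {
        if (instance == null) {                 // lock-free fast path
          synchronized (LazySingleton.class) {
            if (instance == null) {             // someone might have beat us
              instance = new Object();
            }
          }
        }
        return instance;
      }
    }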
 




svn commit: r1504875 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/UserGroupInformation.java

2013-07-19 Thread daryn
Author: daryn
Date: Fri Jul 19 13:39:50 2013
New Revision: 1504875

URL: http://svn.apache.org/r1504875
Log:
merge -c 1504874 FIXES: HADOOP-9748. Reduce blocking on UGI.ensureInitialized 
(daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1504875&r1=1504874&r2=1504875&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Fri Jul 19 13:39:50 2013
@@ -27,6 +27,8 @@ Release 2.3.0 - UNRELEASED
 
   OPTIMIZATIONS
 
+HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
+
   BUG FIXES
 
 HADOOP-9582. Non-existent file to hadoop fs -conf doesn't throw error

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1504875&r1=1504874&r2=1504875&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 Fri Jul 19 13:39:50 2013
@@ -208,9 +208,13 @@ public class UserGroupInformation {
* A method to initialize the fields that depend on a configuration.
* Must be called before useKerberos or groups is used.
*/
-  private static synchronized void ensureInitialized() {
+  private static void ensureInitialized() {
 if (conf == null) {
-  initialize(new Configuration(), false);
+  synchronized(UserGroupInformation.class) {
+if (conf == null) { // someone might have beat us
+  initialize(new Configuration(), false);
+}
+  }
 }
   }
 




svn commit: r1504882 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/UserGroupInformation.java

2013-07-19 Thread daryn
Author: daryn
Date: Fri Jul 19 14:07:49 2013
New Revision: 1504882

URL: http://svn.apache.org/r1504882
Log:
HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1504882&r1=1504881&r2=1504882&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Fri Jul 19 14:07:49 2013
@@ -10,6 +10,8 @@ Release 0.23.10 - UNRELEASED
 
   OPTIMIZATIONS
 
+HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
+
   BUG FIXES
 
 Release 0.23.9 - 2013-07-08

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1504882&r1=1504881&r2=1504882&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 Fri Jul 19 14:07:49 2013
@@ -190,7 +190,6 @@ public class UserGroupInformation {
   /** Metrics to track UGI activity */
   static UgiMetrics metrics = UgiMetrics.create();
   /** Are the static variables that depend on configuration initialized? */
-  private static boolean isInitialized = false;
   /** Should we use Kerberos configuration? */
   private static boolean useKerberos;
   /** Server-side groups fetching service */
@@ -210,9 +209,13 @@ public class UserGroupInformation {
* A method to initialize the fields that depend on a configuration.
* Must be called before useKerberos or groups is used.
*/
-  private static synchronized void ensureInitialized() {
-if (!isInitialized) {
-initialize(new Configuration(), KerberosName.hasRulesBeenSet());
+  private static void ensureInitialized() {
+if (conf == null) {
+  synchronized(UserGroupInformation.class) {
+if (conf == null) { // someone might have beat us
+  initialize(new Configuration(), KerberosName.hasRulesBeenSet());
+}
+  }
 }
   }
 
@@ -252,7 +255,6 @@ public class UserGroupInformation {
 if (!(groups instanceof TestingGroups)) {
   groups = Groups.getUserToGroupsMappingService(conf);
 }
-isInitialized = true;
 UserGroupInformation.conf = conf;
   }
 




svn commit: r1503811 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-07-16 Thread daryn
Author: daryn
Date: Tue Jul 16 17:59:39 2013
New Revision: 1503811

URL: http://svn.apache.org/r1503811
Log:
HADOOP-9683. [RPC v9] Wrap IpcConnectionContext in RPC headers (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1503811&r1=1503810&r2=1503811&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Jul 
16 17:59:39 2013
@@ -354,6 +354,8 @@ Release 2.1.0-beta - 2013-07-02
 
 HADOOP-9688. Add globally unique Client ID to RPC requests. (suresh)
 
+HADOOP-9683. [RPC v9] Wrap IpcConnectionContext in RPC headers (daryn)
+
   NEW FEATURES
 
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1503811&r1=1503810&r2=1503811&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Tue Jul 16 17:59:39 2013
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.ipc;
 
+import static org.apache.hadoop.ipc.RpcConstants.CONNECTION_CONTEXT_CALL_ID;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.DataInputStream;
@@ -63,7 +65,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
+import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcRequestMessageWrapper;
+import org.apache.hadoop.ipc.RPC.RpcKind;
 import org.apache.hadoop.ipc.Server.AuthProtocol;
+import 
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
 import 
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
@@ -833,17 +838,20 @@ public class Client {
 AuthMethod authMethod)
 throws IOException {
   // Write out the ConnectionHeader
-  DataOutputBuffer buf = new DataOutputBuffer();
-  ProtoUtil.makeIpcConnectionContext(
+  IpcConnectionContextProto message = ProtoUtil.makeIpcConnectionContext(
   RPC.getProtocolName(remoteId.getProtocol()),
   remoteId.getTicket(),
-  authMethod).writeTo(buf);
+  authMethod);
+  RpcRequestHeaderProto connectionContextHeader =
+  ProtoUtil.makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
+  OperationProto.RPC_FINAL_PACKET, CONNECTION_CONTEXT_CALL_ID,
+  clientId);
+  RpcRequestMessageWrapper request =
+  new RpcRequestMessageWrapper(connectionContextHeader, message);
   
   // Write out the packet length
-  int bufLen = buf.getLength();
-
-  out.writeInt(bufLen);
-  out.write(buf.getData(), 0, bufLen);
+  out.writeInt(request.getLength());
+  request.write(out);
 }
 
 /* wait till someone signals us to start reading RPC response or
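
After this change the connection context is framed like any other request:
one 4-byte total length, then the delimited RPC header and the delimited
context message back to back. A byte-level sketch, with plain arrays
standing in for the two serialized protobufs (class name is illustrative):

    import java.io.DataOutputStream;
    import java.io.IOException;

    final class FramedRequest {
      // header and payload are assumed to be already length-delimited.
      static void write(DataOutputStream out, byte[] header, byte[] payload)
          throws IOException {
        out.writeInt(header.length + payload.length);  // total frame length
        out.write(header);
        out.write(payload);
      }
    }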

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java?rev=1503811&r1=1503810&r2=1503811&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 Tue Jul 16 17:59:39 2013
@@ -32,6 +32,8 @@ public class RpcConstants

svn commit: r1503830 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-07-16 Thread daryn
Author: daryn
Date: Tue Jul 16 18:59:29 2013
New Revision: 1503830

URL: http://svn.apache.org/r1503830
Log:
HADOOP-9683. [RPC v9] Wrap IpcConnectionContext in RPC headers (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1503830&r1=1503829&r2=1503830&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Jul 16 18:59:29 2013
@@ -78,6 +78,8 @@ Release 2.1.0-beta - 2013-07-02
 HADOOP-9421. [RPC v9] Convert SASL to use ProtoBuf and provide
 negotiation capabilities (daryn)
 
+HADOOP-9683. [RPC v9] Wrap IpcConnectionContext in RPC headers (daryn)
+
   NEW FEATURES
 
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1503830&r1=1503829&r2=1503830&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Tue Jul 16 18:59:29 2013
@@ -62,7 +62,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
+import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcRequestMessageWrapper;
+import org.apache.hadoop.ipc.RPC.RpcKind;
 import org.apache.hadoop.ipc.Server.AuthProtocol;
+import 
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
 import 
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
@@ -113,6 +116,8 @@ public class Client {
   private final boolean fallbackAllowed;
   
   final static int PING_CALL_ID = -1;
+
+  final static int CONNECTION_CONTEXT_CALL_ID = -3;
   
   /**
* Executor on which IPC calls' parameters are sent.
@@ -832,17 +837,19 @@ public class Client {
 AuthMethod authMethod)
 throws IOException {
   // Write out the ConnectionHeader
-  DataOutputBuffer buf = new DataOutputBuffer();
-  ProtoUtil.makeIpcConnectionContext(
+  IpcConnectionContextProto message = ProtoUtil.makeIpcConnectionContext(
   RPC.getProtocolName(remoteId.getProtocol()),
   remoteId.getTicket(),
-  authMethod).writeTo(buf);
+  authMethod);
+  RpcRequestHeaderProto connectionContextHeader =
+  ProtoUtil.makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
+  OperationProto.RPC_FINAL_PACKET, CONNECTION_CONTEXT_CALL_ID);
+  RpcRequestMessageWrapper request =
+  new RpcRequestMessageWrapper(connectionContextHeader, message);
   
   // Write out the packet length
-  int bufLen = buf.getLength();
-
-  out.writeInt(bufLen);
-  out.write(buf.getData(), 0, bufLen);
+  out.writeInt(request.getLength());
+  request.write(out);
 }
 
 /* wait till someone signals us to start reading RPC response or

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1503830&r1=1503829&r2=1503830&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Tue Jul 16

svn commit: r1503832 - in /hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-07-16 Thread daryn
Author: daryn
Date: Tue Jul 16 19:04:17 2013
New Revision: 1503832

URL: http://svn.apache.org/r1503832
Log:
merge -c 1503830 FIXES: HADOOP-9683. [RPC v9] Wrap IpcConnectionContext in RPC 
headers (daryn)

Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1503832&r1=1503831&r2=1503832&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Tue Jul 16 19:04:17 2013
@@ -39,6 +39,8 @@ Release 2.1.0-beta - 2013-07-02
 HADOOP-9421. [RPC v9] Convert SASL to use ProtoBuf and provide
 negotiation capabilities (daryn)
 
+HADOOP-9683. [RPC v9] Wrap IpcConnectionContext in RPC headers (daryn)
+
   NEW FEATURES
 
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1503832&r1=1503831&r2=1503832&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Tue Jul 16 19:04:17 2013
@@ -62,7 +62,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
+import org.apache.hadoop.ipc.ProtobufRpcEngine.RpcRequestMessageWrapper;
+import org.apache.hadoop.ipc.RPC.RpcKind;
 import org.apache.hadoop.ipc.Server.AuthProtocol;
+import 
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
 import 
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
@@ -113,6 +116,8 @@ public class Client {
   private final boolean fallbackAllowed;
   
   final static int PING_CALL_ID = -1;
+
+  final static int CONNECTION_CONTEXT_CALL_ID = -3;
   
   /**
* Executor on which IPC calls' parameters are sent. Deferring
@@ -779,17 +784,19 @@ public class Client {
 AuthMethod authMethod)
 throws IOException {
   // Write out the ConnectionHeader
-  DataOutputBuffer buf = new DataOutputBuffer();
-  ProtoUtil.makeIpcConnectionContext(
+  IpcConnectionContextProto message = ProtoUtil.makeIpcConnectionContext(
   RPC.getProtocolName(remoteId.getProtocol()),
   remoteId.getTicket(),
-  authMethod).writeTo(buf);
+  authMethod);
+  RpcRequestHeaderProto connectionContextHeader =
+  ProtoUtil.makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
+  OperationProto.RPC_FINAL_PACKET, CONNECTION_CONTEXT_CALL_ID);
+  RpcRequestMessageWrapper request =
+  new RpcRequestMessageWrapper(connectionContextHeader, message);
   
   // Write out the packet length
-  int bufLen = buf.getLength();
-
-  out.writeInt(bufLen);
-  out.write(buf.getData(), 0, bufLen);
+  out.writeInt(request.getLength());
+  request.write(out);
 }
 
 /* wait till someone signals us to start reading RPC response or

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1503832&r1=1503831&r2=1503832&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
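
The net effect of HADOOP-9683 on the wire: the connection context is no
longer written as a bare length-prefixed protobuf, but as a normal RPC
request frame whose header carries the reserved call id -3. A minimal
sketch of that framing, assuming protobuf's standard varint-delimited
encoding (writeFramed is a made-up helper; the real logic lives in
ProtobufRpcEngine.RpcRequestMessageWrapper):

    import java.io.DataOutputStream;
    import java.io.IOException;
    import com.google.protobuf.CodedOutputStream;
    import com.google.protobuf.Message;

    // Sketch: each message is written varint-length-delimited; the outer
    // int is the combined size of both delimited messages.
    static void writeFramed(DataOutputStream out, Message header, Message payload)
        throws IOException {
      int h = header.getSerializedSize();
      int p = payload.getSerializedSize();
      out.writeInt(CodedOutputStream.computeRawVarint32Size(h) + h
                 + CodedOutputStream.computeRawVarint32Size(p) + p);
      header.writeDelimitedTo(out);   // RpcRequestHeaderProto, callId = -3
      payload.writeDelimitedTo(out);  // IpcConnectionContextProto
    }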

svn commit: r1491914 - /hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

2013-06-11 Thread daryn
Author: daryn
Date: Tue Jun 11 19:22:53 2013
New Revision: 1491914

URL: http://svn.apache.org/r1491914
Log:
MAPREDUCE-5315.  DistCp reports success even on failure. (mithun and jlowe via 
daryn)

Modified:

hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1491914&r1=1491913&r2=1491914&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 Tue Jun 11 19:22:53 2013
@@ -162,8 +162,9 @@ public class DistCp extends Configured i
 job.getConfiguration().set(DistCpConstants.CONF_LABEL_DISTCP_JOB_ID, 
jobID);
 
 LOG.info("DistCp job-id: " + jobID);
-if (inputOptions.shouldBlock()) {
-  job.waitForCompletion(true);
+if (inputOptions.shouldBlock() && !job.waitForCompletion(true)) {
+  throw new IOException("DistCp failure: Job " + jobID + " has failed: "
+  + job.getStatus().getFailureInfo());
 }
 return job;
   }
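
The visible effect of MAPREDUCE-5315: a blocking copy now raises an
exception instead of handing back a failed Job as if it had succeeded. A
hedged sketch of a caller that depends on this behavior (source and
target paths are placeholders; DistCp.execute() declares a broad throws
clause, so Exception is caught here):

    import java.util.Arrays;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.tools.DistCp;
    import org.apache.hadoop.tools.DistCpOptions;

    Configuration conf = new Configuration();
    DistCpOptions options = new DistCpOptions(
        Arrays.asList(new Path("hdfs://nn/src")),  // placeholder source
        new Path("hdfs://nn/dst"));                // placeholder target
    try {
      new DistCp(conf, options).execute();  // blocks; now throws on job failure
    } catch (Exception e) {
      // Before this fix a failed copy fell through here as a success.
      System.err.println("DistCp failed: " + e.getMessage());
    }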




svn commit: r1491918 - /hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

2013-06-11 Thread daryn
Author: daryn
Date: Tue Jun 11 19:30:44 2013
New Revision: 1491918

URL: http://svn.apache.org/r1491918
Log:
svn merge -c 1491914 FIXES: MAPREDUCE-5315.  DistCp reports success even on 
failure. (mithun and jlowe via daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1491918&r1=1491917&r2=1491918&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 Tue Jun 11 19:30:44 2013
@@ -162,8 +162,9 @@ public class DistCp extends Configured i
 job.getConfiguration().set(DistCpConstants.CONF_LABEL_DISTCP_JOB_ID, 
jobID);
 
 LOG.info("DistCp job-id: " + jobID);
-if (inputOptions.shouldBlock()) {
-  job.waitForCompletion(true);
+if (inputOptions.shouldBlock() && !job.waitForCompletion(true)) {
+  throw new IOException("DistCp failure: Job " + jobID + " has failed: "
+  + job.getStatus().getFailureInfo());
 }
 return job;
   }




svn commit: r1491919 - /hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

2013-06-11 Thread daryn
Author: daryn
Date: Tue Jun 11 19:32:39 2013
New Revision: 1491919

URL: http://svn.apache.org/r1491919
Log:
svn merge -c 1491914 FIXES: MAPREDUCE-5315.  DistCp reports success even on 
failure. (mithun and jlowe via daryn)

Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1491919&r1=1491918&r2=1491919&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 Tue Jun 11 19:32:39 2013
@@ -162,8 +162,9 @@ public class DistCp extends Configured i
 job.getConfiguration().set(DistCpConstants.CONF_LABEL_DISTCP_JOB_ID, 
jobID);
 
 LOG.info("DistCp job-id: " + jobID);
-if (inputOptions.shouldBlock()) {
-  job.waitForCompletion(true);
+if (inputOptions.shouldBlock() && !job.waitForCompletion(true)) {
+  throw new IOException("DistCp failure: Job " + jobID + " has failed: "
+  + job.getStatus().getFailureInfo());
 }
 return job;
   }




svn commit: r1491921 - /hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

2013-06-11 Thread daryn
Author: daryn
Date: Tue Jun 11 19:35:26 2013
New Revision: 1491921

URL: http://svn.apache.org/r1491921
Log:
svn merge -c 1491914 FIXES: MAPREDUCE-5315.  DistCp reports success even on 
failure. (mithun and jlowe via daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1491921&r1=1491920&r2=1491921&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 Tue Jun 11 19:35:26 2013
@@ -157,8 +157,9 @@ public class DistCp extends Configured i
 job.getConfiguration().set(DistCpConstants.CONF_LABEL_DISTCP_JOB_ID, 
jobID);
 
 LOG.info("DistCp job-id: " + jobID);
-if (inputOptions.shouldBlock()) {
-  job.waitForCompletion(true);
+if (inputOptions.shouldBlock() && !job.waitForCompletion(true)) {
+  throw new IOException("DistCp failure: Job " + jobID + " has failed: "
+  + job.getStatus().getFailureInfo());
 }
 return job;
   }




svn commit: r1457763 - in /hadoop/common/trunk/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ hadoop-common/ hadoop-common/src/main/java/org/apache/ha

2013-03-18 Thread daryn
Author: daryn
Date: Mon Mar 18 13:46:52 2013
New Revision: 1457763

URL: http://svn.apache.org/r1457763
Log:
HADOOP-9299.  kerberos name resolution is kicking in even when kerberos is not 
configured (daryn)

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java?rev=1457763&r1=1457762&r2=1457763&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
 Mon Mar 18 13:46:52 2013
@@ -383,10 +383,26 @@ public class KerberosName {
* @param ruleString the rules string.
*/
   public static void setRules(String ruleString) {
-rules = parseRules(ruleString);
+rules = (ruleString != null) ? parseRules(ruleString) : null;
   }
 
   /**
+   * Get the rules.
+   * @return String of configured rules, or null if not yet configured
+   */
+  public static String getRules() {
+String ruleString = null;
+if (rules != null) {
+  StringBuilder sb = new StringBuilder();
+  for (Rule rule : rules) {
+sb.append(rule.toString()).append("\n");
+  }
+  ruleString = sb.toString().trim();
+}
+return ruleString;
+  }
+  
+  /**
* Indicates if the name rules have been set.
* 
* @return if the name rules have been set.
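
Together the two hunks make the rule state symmetric: setRules(null) now
clears it, and the new getRules() serializes it back one rule per line. A
small usage sketch (the rule string is illustrative, and the exact text
returned depends on Rule.toString()):

    // Configure, read back, then clear the auth_to_local rules.
    KerberosName.setRules("RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
    String rules = KerberosName.getRules();  // non-null, newline-separated
    KerberosName.setRules(null);             // legal after this change
    assert KerberosName.getRules() == null;  // nothing configured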

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1457763&r1=1457762&r2=1457763&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Mar 
18 13:46:52 2013
@@ -571,6 +571,9 @@ Release 2.0.5-beta - UNRELEASED
 HADOOP-9407. commons-daemon 1.0.3 dependency has bad group id causing
 build issues. (Sangjin Lee via suresh)
 
+HADOOP-9299.  kerberos name resolution is kicking in even when kerberos
+is not configured (daryn)
+
 Release 2.0.4-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java?rev=1457763&r1=1457762&r2=1457763&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
 Mon Mar 18 13:46:52 2013
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.security;
 
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
+
 import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -25,7 +27,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 /**
  * This class implements parsing and handling of Kerberos principal names. In 
  * particular, it splits them apart and translates them down into local
@@ -36,15 +37,6 @@ import org.apache.hadoop.fs.CommonConfig
 @InterfaceStability.Evolving
 public class HadoopKerberosName extends KerberosName {
 
-  static {
-try {
-  KerberosUtil.getDefaultRealm();
-} catch (Exception ke) {
-  if(UserGroupInformation.isSecurityEnabled())
-throw new IllegalArgumentException("Can't get Kerberos 
configuration", ke

svn commit: r1457770 - in /hadoop/common/branches/branch-2/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ hadoop-common/ hadoop-common/src/main/java/o

2013-03-18 Thread daryn
Author: daryn
Date: Mon Mar 18 14:05:09 2013
New Revision: 1457770

URL: http://svn.apache.org/r1457770
Log:
svn merge -c 1457763 FIXES: HADOOP-9299.  kerberos name resolution is kicking 
in even when kerberos is not configured (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java?rev=1457770&r1=1457769&r2=1457770&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
 Mon Mar 18 14:05:09 2013
@@ -383,10 +383,26 @@ public class KerberosName {
* @param ruleString the rules string.
*/
   public static void setRules(String ruleString) {
-rules = parseRules(ruleString);
+rules = (ruleString != null) ? parseRules(ruleString) : null;
   }
 
   /**
+   * Get the rules.
+   * @return String of configured rules, or null if not yet configured
+   */
+  public static String getRules() {
+String ruleString = null;
+if (rules != null) {
+  StringBuilder sb = new StringBuilder();
+  for (Rule rule : rules) {
+sb.append(rule.toString()).append("\n");
+  }
+  ruleString = sb.toString().trim();
+}
+return ruleString;
+  }
+  
+  /**
* Indicates if the name rules have been set.
* 
* @return if the name rules have been set.

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1457770&r1=1457769&r2=1457770&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Mon Mar 18 14:05:09 2013
@@ -85,6 +85,9 @@ Release 2.0.5-beta - UNRELEASED
 HADOOP-9407. commons-daemon 1.0.3 dependency has bad group id causing
 build issues. (Sangjin Lee via suresh)
 
+HADOOP-9299.  kerberos name resolution is kicking in even when kerberos
+is not configured (daryn)
+
 Release 2.0.4-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java?rev=1457770&r1=1457769&r2=1457770&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
 Mon Mar 18 14:05:09 2013
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.security;
 
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
+
 import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -25,7 +27,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 /**
  * This class implements parsing and handling of Kerberos principal names. In 
  * particular, it splits them apart and translates them down into local
@@ -36,15 +37,6 @@ import org.apache.hadoop.fs.CommonConfig
 @InterfaceStability.Evolving
 public class HadoopKerberosName extends KerberosName {
 
-  static {
-try

svn commit: r1455974 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/http/HttpServer.java src/test/java/org/apache/hadoop/http

2013-03-13 Thread daryn
Author: daryn
Date: Wed Mar 13 14:51:01 2013
New Revision: 1455974

URL: http://svn.apache.org/r1455974
Log:
HADOOP-8816.  HTTP Error 413 full HEAD if using kerberos authentication (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1455974&r1=1455973&r2=1455974&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Wed Mar 13 14:51:01 2013
@@ -114,6 +114,9 @@ Release 0.23.7 - UNRELEASED
 HADOOP-9339. IPC.Server incorrectly sets UGI auth type (Daryn Sharp via
 kihwal)
 
+HADOOP-8816. HTTP Error 413 full HEAD if using kerberos authentication
+(daryn)
+
 Release 0.23.6 - 2013-02-06
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1455974&r1=1455973&r2=1455974&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 Wed Mar 13 14:51:01 2013
@@ -274,6 +274,7 @@ public class HttpServer implements Filte
 ret.setAcceptQueueSize(128);
 ret.setResolveNames(false);
 ret.setUseDirectBuffers(false);
+ret.setHeaderBufferSize(1024*64);
 return ret;
   }
 

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1455974&r1=1455973&r2=1455974&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
 Wed Mar 13 14:51:01 2013
@@ -118,6 +118,18 @@ public class TestHttpServer extends Http
   }
 
   @SuppressWarnings("serial")
+  public static class LongHeaderServlet extends HttpServlet {
+@SuppressWarnings("unchecked")
+@Override
+public void doGet(HttpServletRequest request,
+  HttpServletResponse response
+) throws ServletException, IOException {
+  Assert.assertEquals(63*1024, request.getHeader("longheader").length());
+  response.setStatus(HttpServletResponse.SC_OK);
+}
+  }
+
+  @SuppressWarnings("serial")
   public static class HtmlContentServlet extends HttpServlet {
 @Override
 public void doGet(HttpServletRequest request, 
@@ -137,6 +149,7 @@ public class TestHttpServer extends Http
 server.addServlet("echo", "/echo", EchoServlet.class);
 server.addServlet("echomap", "/echomap", EchoMapServlet.class);
 server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
+server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
 server.addJerseyResourcePackage(
 JerseyResource.class.getPackage().getName(), "/jersey/*");
 server.start();
@@ -195,6 +208,18 @@ public class TestHttpServer extends Http
 readOutput(new URL(baseUrl, "/echomap?a=b&c=d&a=")));
   }
 
+  /** Test that a request with a 63KB header is accepted. */
+  @Test public void testLongHeader() throws Exception {
+URL url = new URL(baseUrl, "/longheader");
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+StringBuilder sb = new StringBuilder();
+for (int i = 0 ; i < 63 * 1024; i++) {
+  sb.append("a");
+}
+conn.setRequestProperty("longheader", sb.toString());
+assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+  }
+
   @Test public void testContentTypes() throws Exception {
 // Static CSS files should have text/css
 URL cssUrl = new URL(baseUrl, "/static/test.css");
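
For context on the 1024*64 figure above: a SPNEGO Authorization header
carries a base64-encoded Kerberos ticket, and tickets with large PACs can
blow past Jetty's small default header buffer, producing the HTTP 413 in
the JIRA title. Rough arithmetic, with the 48KB ticket size purely an
illustrative assumption:

    int ticketBytes = 48 * 1024;                    // assumed large ticket/PAC
    int base64Bytes = ((ticketBytes + 2) / 3) * 4;  // base64 grows data by 4/3
    int headerBytes = "Authorization: Negotiate ".length() + base64Bytes;
    System.out.println(headerBytes);                // ~65561, hence a 64KB buffer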




svn commit: r1454019 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/util/GenericOptionsParser.java src/test/java/org/apache/hadoop/util/Test

2013-03-07 Thread daryn
Author: daryn
Date: Thu Mar  7 19:22:44 2013
New Revision: 1454019

URL: http://svn.apache.org/r1454019
Log:
HADOOP-9374. Add tokens from -tokenCacheFile into UGI (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1454019&r1=1454018&r2=1454019&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Mar 
 7 19:22:44 2013
@@ -1529,6 +1529,8 @@ Release 0.23.7 - UNRELEASED
 HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via
 jeagles)
 
+HADOOP-9374. Add tokens from -tokenCacheFile into UGI (daryn)
+
   OPTIMIZATIONS
 
 HADOOP-8462. Native-code implementation of bzip2 codec. (Govind Kamat via

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=1454019&r1=1454018&r2=1454019&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
 Thu Mar  7 19:22:44 2013
@@ -42,6 +42,8 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
 * <code>GenericOptionsParser</code> is a utility to parse command line
@@ -321,15 +323,17 @@ public class GenericOptionsParser {
   String fileName = line.getOptionValue("tokenCacheFile");
   // check if the local file exists
   FileSystem localFs = FileSystem.getLocal(conf);
-  Path p = new Path(fileName);
+  Path p = localFs.makeQualified(new Path(fileName));
   if (!localFs.exists(p)) {
   throw new FileNotFoundException("File "+fileName+" does not exist.");
   }
   if(LOG.isDebugEnabled()) {
 LOG.debug("setting conf tokensFile: " + fileName);
   }
-  conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
-  .toString(), "from -tokenCacheFile command line option");
+  UserGroupInformation.getCurrentUser().addCredentials(
+  Credentials.readTokenStorageFile(p, conf));
+  conf.set("mapreduce.job.credentials.json", p.toString(),
+   "from -tokenCacheFile command line option");
 
 }
   }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java?rev=1454019&r1=1454018&r2=1454019&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
 Thu Mar  7 19:22:44 2013
@@ -27,6 +27,11 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import 
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -164,13 +169,25 @@ public class TestGenericOptionsParser ex
 th instanceof FileNotFoundException);
 
 // create file
-Path tmpPath = new Path(tmpFile.toString());
-localFs.create(tmpPath);
+Path tmpPath = localFs.makeQualified(new Path(tmpFile.toString()));
+Token<?> token = new Token<AbstractDelegationTokenIdentifier>(
+"identifier".getBytes(), "password".getBytes(),
+new Text("token-kind"), new
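
End to end, the change means a token file produced by
Credentials.writeTokenStorageFile is loaded into the current user rather
than merely pointed at by a conf key. A hedged sketch (the file name is a
placeholder, and token stands for any Token<?> already in hand):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.util.GenericOptionsParser;

    Configuration conf = new Configuration();
    Credentials creds = new Credentials();
    creds.addToken(new Text("my-service"), token);  // placeholder token
    creds.writeTokenStorageFile(new Path("tokens.bin"), conf);

    // Parsing generic options now pulls the file into the current UGI.
    new GenericOptionsParser(conf, new String[] {"-tokenCacheFile", "tokens.bin"});
    // UserGroupInformation.getCurrentUser().getTokens() now includes it.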

svn commit: r1454025 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/util/GenericOptionsParser.java src/test/java/org/apache/hado

2013-03-07 Thread daryn
Author: daryn
Date: Thu Mar  7 19:23:50 2013
New Revision: 1454025

URL: http://svn.apache.org/r1454025
Log:
svn merge -c 1454019 FIXES: HADOOP-9374. Add tokens from -tokenCacheFile into 
UGI (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1454025&r1=1454024&r2=1454025&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Mar  7 19:23:50 2013
@@ -1087,6 +1087,8 @@ Release 0.23.7 - UNRELEASED
 HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via
 jeagles)
 
+HADOOP-9374. Add tokens from -tokenCacheFile into UGI (daryn)
+
   OPTIMIZATIONS
 
 HADOOP-8462. Native-code implementation of bzip2 codec. (Govind Kamat via

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=1454025&r1=1454024&r2=1454025&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
 Thu Mar  7 19:23:50 2013
@@ -42,6 +42,8 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
 * <code>GenericOptionsParser</code> is a utility to parse command line
@@ -321,15 +323,17 @@ public class GenericOptionsParser {
   String fileName = line.getOptionValue("tokenCacheFile");
   // check if the local file exists
   FileSystem localFs = FileSystem.getLocal(conf);
-  Path p = new Path(fileName);
+  Path p = localFs.makeQualified(new Path(fileName));
   if (!localFs.exists(p)) {
   throw new FileNotFoundException("File "+fileName+" does not exist.");
   }
   if(LOG.isDebugEnabled()) {
 LOG.debug("setting conf tokensFile: " + fileName);
   }
-  conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
-  .toString(), "from -tokenCacheFile command line option");
+  UserGroupInformation.getCurrentUser().addCredentials(
+  Credentials.readTokenStorageFile(p, conf));
+  conf.set("mapreduce.job.credentials.json", p.toString(),
+   "from -tokenCacheFile command line option");
 
 }
   }

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java?rev=1454025&r1=1454024&r2=1454025&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
 Thu Mar  7 19:23:50 2013
@@ -27,6 +27,11 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import 
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -162,13 +167,25 @@ public class TestGenericOptionsParser ex
 th instanceof FileNotFoundException);
 
 // create file
-Path tmpPath = new Path(tmpFile.toString());
-localFs.create(tmpPath);
+Path

svn commit: r1454028 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/util/GenericOptionsParser.java src/test/java/org/apache/h

2013-03-07 Thread daryn
Author: daryn
Date: Thu Mar  7 19:27:16 2013
New Revision: 1454028

URL: http://svn.apache.org/r1454028
Log:
svn merge -c 1454019 FIXES: HADOOP-9374. Add tokens from -tokenCacheFile into 
UGI (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1454028&r1=1454027&r2=1454028&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Mar  7 19:27:16 2013
@@ -31,6 +31,8 @@ Release 0.23.7 - UNRELEASED
 HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via
 jeagles)
 
+HADOOP-9374. Add tokens from -tokenCacheFile into UGI (daryn)
+
   OPTIMIZATIONS
 
 HADOOP-9147. Add missing fields to FIleStatus.toString.

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=1454028&r1=1454027&r2=1454028&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
 Thu Mar  7 19:27:16 2013
@@ -42,6 +42,8 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
 * <code>GenericOptionsParser</code> is a utility to parse command line
@@ -316,15 +318,17 @@ public class GenericOptionsParser {
   String fileName = line.getOptionValue("tokenCacheFile");
   // check if the local file exists
   FileSystem localFs = FileSystem.getLocal(conf);
-  Path p = new Path(fileName);
+  Path p = localFs.makeQualified(new Path(fileName));
   if (!localFs.exists(p)) {
   throw new FileNotFoundException("File "+fileName+" does not exist.");
   }
   if(LOG.isDebugEnabled()) {
 LOG.debug("setting conf tokensFile: " + fileName);
   }
-  conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
-  .toString(), "from -tokenCacheFile command line option");
+  UserGroupInformation.getCurrentUser().addCredentials(
+  Credentials.readTokenStorageFile(p, conf));
+  conf.set("mapreduce.job.credentials.json", p.toString(),
+   "from -tokenCacheFile command line option");
 
 }
   }

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java?rev=1454028&r1=1454027&r2=1454028&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
 Thu Mar  7 19:27:16 2013
@@ -27,6 +27,11 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import 
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -162,13 +167,25 @@ public class TestGenericOptionsParser ex
 th instanceof FileNotFoundException);
 
 // create file
-Path tmpPath = new Path(tmpFile.toString());
-localFs.create

svn commit: r1452340 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/UserGroupInformation.java src/test/java/org/apac

2013-03-04 Thread daryn
Author: daryn
Date: Mon Mar  4 15:03:54 2013
New Revision: 1452340

URL: http://svn.apache.org/r1452340
Log:
HADOOP-9352. Expose UGI.setLoginUser for tests (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1452340&r1=1452339&r2=1452340&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Mon Mar  4 15:03:54 2013
@@ -26,6 +26,8 @@ Release 0.23.7 - UNRELEASED
 HADOOP-9336. Allow UGI of current connection to be queried. (Daryn Sharp
 via kihwal)
 
+HADOOP-9352. Expose UGI.setLoginUser for tests (daryn)
+
   OPTIMIZATIONS
 
 HADOOP-9147. Add missing fields to FIleStatus.toString.

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1452340&r1=1452339&r2=1452340&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 Mon Mar  4 15:03:54 2013
@@ -64,6 +64,8 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * User and group information for Hadoop.
  * This class wraps around a JAAS Subject and provides methods to determine the
@@ -544,7 +546,7 @@ public class UserGroupInformation {
 if (proxyUser == null) {
   proxyUser = System.getProperty(HADOOP_PROXY_USER);
 }
-loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, 
realUser);
+setLoginUser(proxyUser == null ? realUser : createProxyUser(proxyUser, 
realUser));
 
 String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
 if (fileLocation != null) {
@@ -566,6 +568,15 @@ public class UserGroupInformation {
 return loginUser;
   }
 
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  @VisibleForTesting
+  public synchronized static void setLoginUser(UserGroupInformation ugi) {
+// if this is to become stable, should probably logout the currently
+// logged in ugi if it's different
+loginUser = ugi;
+  }
+
   /**
* Is this user logged in from a keytab file?
* @return true if the credentials are from a keytab file.
@@ -679,7 +690,7 @@ public class UserGroupInformation {
   start = System.currentTimeMillis();
   login.login();
   metrics.loginSuccess.add(System.currentTimeMillis() - start);
-  loginUser = new UserGroupInformation(subject);
+  setLoginUser(new UserGroupInformation(subject));
   loginUser.setLogin(login);
   loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
 } catch (LoginException le) {

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1452340&r1=1452339&r2=1452340&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
 Mon Mar  4 15:03:54 2013
@@ -530,4 +530,11 @@ public class TestUserGroupInformation {
 }
   });
   }
+
+  @Test(timeout=1000)
+  public void testSetLoginUser() throws IOException {
+UserGroupInformation ugi = 
UserGroupInformation.createRemoteUser("test-user");
+UserGroupInformation.setLoginUser(ugi
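
The truncated test above reduces to roughly this (a sketch; note the
inline comment's caveat that setLoginUser does not log out a previously
logged-in UGI):

    @Test(timeout=1000)
    public void testSetLoginUser() throws IOException {
      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
      UserGroupInformation.setLoginUser(ugi);
      assertEquals(ugi, UserGroupInformation.getLoginUser());
    }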

svn commit: r1434370 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-01-16 Thread daryn
Author: daryn
Date: Wed Jan 16 20:34:03 2013
New Revision: 1434370

URL: http://svn.apache.org/viewvc?rev=1434370view=rev
Log:
HADOOP-8999. Move to incompatible section of changelog

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1434370&r1=1434369&r2=1434370&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Jan 
16 20:34:03 2013
@@ -326,6 +326,8 @@ Release 2.0.3-alpha - Unreleased 
 
   INCOMPATIBLE CHANGES
 
+HADOOP-8999. SASL negotiation is flawed (daryn)
+
   NEW FEATURES
 
 HADOOP-8597. Permit FsShell's text command to read Avro files.
@@ -502,8 +504,6 @@ Release 2.0.3-alpha - Unreleased 
 
 HADOOP-7115. Add a cache for getpwuid_r and getpwgid_r calls (tucu)
 
-HADOOP-8999. SASL negotiation is flawed (daryn)
-
 HADOOP-6607. Add different variants of non caching HTTP headers. (tucu)
 
 HADOOP-9049. DelegationTokenRenewer needs to be Singleton and FileSystems




svn commit: r1434373 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2013-01-16 Thread daryn
Author: daryn
Date: Wed Jan 16 20:37:53 2013
New Revision: 1434373

URL: http://svn.apache.org/viewvc?rev=1434373view=rev
Log:
HADOOP-8999. Move to incompatible section of changelog

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1434373&r1=1434372&r2=1434373&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Jan 16 20:37:53 2013
@@ -4,6 +4,8 @@ Release 2.0.3-alpha - Unreleased 
 
   INCOMPATIBLE CHANGES
 
+HADOOP-8999. SASL negotiation is flawed (daryn)
+
   NEW FEATURES
 
 HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
@@ -195,8 +197,6 @@ Release 2.0.3-alpha - Unreleased 
 
 HADOOP-7115. Add a cache for getpwuid_r and getpwgid_r calls (tucu)
 
-HADOOP-8999. SASL negotiation is flawed (daryn)
-
 HADOOP-6607. Add different variants of non caching HTTP headers. (tucu)
 
 HADOOP-9049. DelegationTokenRenewer needs to be Singleton and FileSystems




svn commit: r1409848 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

2012-11-15 Thread daryn
Author: daryn
Date: Thu Nov 15 15:51:03 2012
New Revision: 1409848

URL: http://svn.apache.org/viewvc?rev=1409848view=rev
Log:
HDFS-4104. dfs -test -d prints inappropriate error on nonexistent directory 
(Andy Isaacson via daryn)

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java?rev=1409848&r1=1409847&r2=1409848&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 Thu Nov 15 15:51:03 2012
@@ -91,8 +91,6 @@ class Test extends FsCommand {  
 
   @Override
   protected void processNonexistentPath(PathData item) throws IOException {
-// NOTE: errors for FNF is not how the shell works!
-if (flag != 'e') displayError(new PathNotFoundException(item.toString()));
 exitCode = 1;
   }
 }




svn commit: r1409850 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

2012-11-15 Thread daryn
Author: daryn
Date: Thu Nov 15 15:55:36 2012
New Revision: 1409850

URL: http://svn.apache.org/viewvc?rev=1409850view=rev
Log:
svn merge -c 1409848 FIXES: HDFS-4104. dfs -test -d prints inappropriate error 
on nonexistent directory (Andy Isaacson via daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java?rev=1409850&r1=1409849&r2=1409850&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 Thu Nov 15 15:55:36 2012
@@ -91,8 +91,6 @@ class Test extends FsCommand {  
 
   @Override
   protected void processNonexistentPath(PathData item) throws IOException {
-// NOTE: errors for FNF is not how the shell works!
-if (flag != 'e') displayError(new PathNotFoundException(item.toString()));
 exitCode = 1;
   }
 }




svn commit: r1408837 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Server.java src/main/java/org/apache/hadoop/security/SaslRpcClient.j

2012-11-13 Thread daryn
Author: daryn
Date: Tue Nov 13 17:10:13 2012
New Revision: 1408837

URL: http://svn.apache.org/viewvc?rev=1408837view=rev
Log:
HADOOP-8999. SASL negotiation is flawed (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1408837&r1=1408836&r2=1408837&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Nov 
13 17:10:13 2012
@@ -424,6 +424,8 @@ Release 2.0.3-alpha - Unreleased 
 
 HADOOP-7115. Add a cache for getpwuid_r and getpwgid_r calls (tucu)
 
+HADOOP-8999. SASL negotiation is flawed (daryn)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1408837&r1=1408836&r2=1408837&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Tue Nov 13 17:10:13 2012
@@ -1220,6 +1220,10 @@ public abstract class Server {
   AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser);
   throw e;
 }
+if (replyToken == null && authMethod == AuthMethod.PLAIN) {
+  // client needs at least response to know if it should use SIMPLE
+  replyToken = new byte[0];
+}
 if (replyToken != null) {
   if (LOG.isDebugEnabled())
 LOG.debug("Will send token of size " + replyToken.length

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1408837&r1=1408836&r2=1408837&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 Tue Nov 13 17:10:13 2012
@@ -145,15 +145,13 @@ public class SaslRpcClient {
   byte[] saslToken = new byte[0];
   if (saslClient.hasInitialResponse())
 saslToken = saslClient.evaluateChallenge(saslToken);
-  if (saslToken != null) {
+  while (saslToken != null) {
 outStream.writeInt(saslToken.length);
 outStream.write(saslToken, 0, saslToken.length);
 outStream.flush();
 if (LOG.isDebugEnabled())
   LOG.debug("Have sent token of size " + saslToken.length
   + " from initSASLContext.");
-  }
-  if (!saslClient.isComplete()) {
 readStatus(inStream);
 int len = inStream.readInt();
 if (len == SaslRpcServer.SWITCH_TO_SIMPLE_AUTH) {
@@ -161,32 +159,18 @@ public class SaslRpcClient {
 LOG.debug("Server asks us to fall back to simple auth.");
   saslClient.dispose();
   return false;
+} else if ((len == 0) && saslClient.isComplete()) {
+  break;
 }
 saslToken = new byte[len];
 if (LOG.isDebugEnabled())
   LOG.debug("Will read input token of size " + saslToken.length
   + " for processing by initSASLContext");
 inStream.readFully(saslToken);
-  }
-
-  while (!saslClient.isComplete()) {
 saslToken = saslClient.evaluateChallenge(saslToken);
-if (saslToken != null) {
-  if (LOG.isDebugEnabled())
-LOG.debug("Will send token of size " + saslToken.length
-+ " from initSASLContext.");
-  outStream.writeInt(saslToken.length);
-  outStream.write(saslToken, 0, saslToken.length);
-  outStream.flush();
-}
-if (!saslClient.isComplete()) {
-  readStatus(inStream);
-  saslToken = new byte[inStream.readInt()];
-  if (LOG.isDebugEnabled())
-LOG.debug("Will read input token of size " + saslToken.length
-+ " for processing by initSASLContext
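
Distilled, the client side of HADOOP-8999 becomes a single negotiation
loop: keep exchanging tokens until the server answers with a zero-length
token and the SaslClient reports completion. A condensed sketch (send,
readLength and readFully are stand-ins for the stream plumbing in the
diff, not real helpers in the class):

    byte[] token = saslClient.hasInitialResponse()
        ? saslClient.evaluateChallenge(new byte[0]) : new byte[0];
    while (token != null) {
      send(out, token);                  // writeInt(length), write, flush
      int len = readLength(in);          // after checking the status word
      if (len == SaslRpcServer.SWITCH_TO_SIMPLE_AUTH) {
        return false;                    // server directs fallback to SIMPLE
      }
      if (len == 0 && saslClient.isComplete()) {
        break;                           // negotiation finished
      }
      token = saslClient.evaluateChallenge(readFully(in, len));
    }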

svn commit: r1408839 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Server.java src/main/java/org/apache/hadoop/security/Sas

2012-11-13 Thread daryn
Author: daryn
Date: Tue Nov 13 17:11:52 2012
New Revision: 1408839

URL: http://svn.apache.org/viewvc?rev=1408839view=rev
Log:
svn merge -c 1408837 FIXES: HADOOP-8999. SASL negotiation is flawed (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1408839&r1=1408838&r2=1408839&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Nov 13 17:11:52 2012
@@ -141,6 +141,8 @@ Release 2.0.3-alpha - Unreleased 
 
 HADOOP-7115. Add a cache for getpwuid_r and getpwgid_r calls (tucu)
 
+HADOOP-8999. SASL negotiation is flawed (daryn)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1408839&r1=1408838&r2=1408839&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Tue Nov 13 17:11:52 2012
@@ -1179,6 +1179,10 @@ public abstract class Server {
   AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser);
   throw e;
 }
+if (replyToken == null && authMethod == AuthMethod.PLAIN) {
+  // client needs at least response to know if it should use SIMPLE
+  replyToken = new byte[0];
+}
 if (replyToken != null) {
   if (LOG.isDebugEnabled())
 LOG.debug("Will send token of size " + replyToken.length

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1408839&r1=1408838&r2=1408839&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 Tue Nov 13 17:11:52 2012
@@ -145,15 +145,13 @@ public class SaslRpcClient {
   byte[] saslToken = new byte[0];
   if (saslClient.hasInitialResponse())
 saslToken = saslClient.evaluateChallenge(saslToken);
-  if (saslToken != null) {
+  while (saslToken != null) {
 outStream.writeInt(saslToken.length);
 outStream.write(saslToken, 0, saslToken.length);
 outStream.flush();
 if (LOG.isDebugEnabled())
   LOG.debug("Have sent token of size " + saslToken.length
   + " from initSASLContext.");
-  }
-  if (!saslClient.isComplete()) {
 readStatus(inStream);
 int len = inStream.readInt();
 if (len == SaslRpcServer.SWITCH_TO_SIMPLE_AUTH) {
@@ -161,32 +159,18 @@ public class SaslRpcClient {
 LOG.debug("Server asks us to fall back to simple auth.");
   saslClient.dispose();
   return false;
+} else if ((len == 0) && saslClient.isComplete()) {
+  break;
 }
 saslToken = new byte[len];
 if (LOG.isDebugEnabled())
   LOG.debug("Will read input token of size " + saslToken.length
   + " for processing by initSASLContext");
 inStream.readFully(saslToken);
-  }
-
-  while (!saslClient.isComplete()) {
 saslToken = saslClient.evaluateChallenge(saslToken);
-if (saslToken != null) {
-  if (LOG.isDebugEnabled())
-LOG.debug("Will send token of size " + saslToken.length
-+ " from initSASLContext.");
-  outStream.writeInt(saslToken.length);
-  outStream.write(saslToken, 0, saslToken.length);
-  outStream.flush();
-}
-if (!saslClient.isComplete()) {
-  readStatus(inStream);
-  saslToken = new

svn commit: r1406198 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/fs/shell/Test.java test/resources/testConf.xml

2012-11-06 Thread daryn
Author: daryn
Date: Tue Nov  6 15:57:58 2012
New Revision: 1406198

URL: http://svn.apache.org/viewvc?rev=1406198&view=rev
Log:
HDFS-1331. dfs -test should work like /bin/test (Andy Isaacson via daryn)

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java?rev=1406198&r1=1406197&r2=1406198&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 Tue Nov  6 15:57:58 2012
@@ -37,16 +37,21 @@ class Test extends FsCommand {  
   }
 
   public static final String NAME = "test";
-  public static final String USAGE = "-[ezd] <path>";
+  public static final String USAGE = "-[defsz] <path>";
   public static final String DESCRIPTION =
-    "If file exists, has zero length, is a directory\n" +
-    "then return 0, else return 1.";
+    "Answer various questions about <path>, with result via exit status.\n" +
+    "  -d  return 0 if <path> is a directory.\n" +
+    "  -e  return 0 if <path> exists.\n" +
+    "  -f  return 0 if <path> is a file.\n" +
+    "  -s  return 0 if file <path> is greater than zero bytes in size.\n" +
+    "  -z  return 0 if file <path> is zero bytes in size.\n" +
+    "else, return 1.";
 
   private char flag;
 
   @Override
   protected void processOptions(LinkedList<String> args) {
-    CommandFormat cf = new CommandFormat(1, 1, "e", "d", "z");
+    CommandFormat cf = new CommandFormat(1, 1, "e", "d", "f", "s", "z");
 cf.parse(args);
 
 String[] opts = cf.getOpts().toArray(new String[0]);
@@ -71,6 +76,12 @@ class Test extends FsCommand {  
   case 'd':
 test = item.stat.isDirectory();
 break;
+  case 'f':
+test = item.stat.isFile();
+break;
+  case 's':
+test = (item.stat.getLen() > 0);
+break;
   case 'z':
 test = (item.stat.getLen() == 0);
 break;

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1406198&r1=1406197&r2=1406198&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
 Tue Nov  6 15:57:58 2012
@@ -591,11 +591,11 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-test -\[ezd\] &lt;path&gt;:\s+If file exists, has zero length, is a directory( )*</expected-output>
+          <expected-output>^-test -\[defsz\] &lt;path&gt;:\sAnswer various questions about &lt;path&gt;, with result via exit status.</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^( |\t)*then return 0, else return 1.( )*</expected-output>
+          <expected-output>^( |\t)*else, return 1.( )*</expected-output>
         </comparator>
       </comparators>
     </test>
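For a sense of how the new flags behave, a hedged sketch of driving them
programmatically; the path is invented, and the exit codes follow the
/bin/test convention the patch describes (0 on success, 1 otherwise):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FsShell;
    import org.apache.hadoop.util.ToolRunner;

    public class FsTestFlags {
      public static void main(String[] args) throws Exception {
        FsShell shell = new FsShell(new Configuration());
        // 0 if /tmp/data exists and is a regular file, 1 otherwise
        int isFile = ToolRunner.run(shell,
            new String[] {"-test", "-f", "/tmp/data"});
        // 0 if the file is greater than zero bytes in size (-s)
        int nonEmpty = ToolRunner.run(shell,
            new String[] {"-test", "-s", "/tmp/data"});
        System.out.println("isFile=" + isFile + ", nonEmpty=" + nonEmpty);
      }
    }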




svn commit: r1406203 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/fs/shell/Test.java test/resources/testConf.xml

2012-11-06 Thread daryn
Author: daryn
Date: Tue Nov  6 16:13:05 2012
New Revision: 1406203

URL: http://svn.apache.org/viewvc?rev=1406203&view=rev
Log:
svn merge -c 1406198 FIXES: HDFS-1331. dfs -test should work like /bin/test 
(Andy Isaacson via daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java?rev=1406203&r1=1406202&r2=1406203&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 Tue Nov  6 16:13:05 2012
@@ -37,16 +37,21 @@ class Test extends FsCommand {  
   }
 
   public static final String NAME = "test";
-  public static final String USAGE = "-[ezd] <path>";
+  public static final String USAGE = "-[defsz] <path>";
   public static final String DESCRIPTION =
-    "If file exists, has zero length, is a directory\n" +
-    "then return 0, else return 1.";
+    "Answer various questions about <path>, with result via exit status.\n" +
+    "  -d  return 0 if <path> is a directory.\n" +
+    "  -e  return 0 if <path> exists.\n" +
+    "  -f  return 0 if <path> is a file.\n" +
+    "  -s  return 0 if file <path> is greater than zero bytes in size.\n" +
+    "  -z  return 0 if file <path> is zero bytes in size.\n" +
+    "else, return 1.";
 
   private char flag;
 
   @Override
   protected void processOptions(LinkedList<String> args) {
-    CommandFormat cf = new CommandFormat(1, 1, "e", "d", "z");
+    CommandFormat cf = new CommandFormat(1, 1, "e", "d", "f", "s", "z");
 cf.parse(args);
 
 String[] opts = cf.getOpts().toArray(new String[0]);
@@ -71,6 +76,12 @@ class Test extends FsCommand {  
   case 'd':
 test = item.stat.isDirectory();
 break;
+  case 'f':
+test = item.stat.isFile();
+break;
+  case 's':
+test = (item.stat.getLen() > 0);
+break;
   case 'z':
 test = (item.stat.getLen() == 0);
 break;

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1406203&r1=1406202&r2=1406203&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
 Tue Nov  6 16:13:05 2012
@@ -591,11 +591,11 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-test -\[ezd\] &lt;path&gt;:\s+If file exists, has zero length, is a directory( )*</expected-output>
+          <expected-output>^-test -\[defsz\] &lt;path&gt;:\sAnswer various questions about &lt;path&gt;, with result via exit status.</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^( |\t)*then return 0, else return 1.( )*</expected-output>
+          <expected-output>^( |\t)*else, return 1.( )*</expected-output>
         </comparator>
       </comparators>
     </test>




svn commit: r1401473 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/util/StringInterner.java test/java/org/apache/hadoop/util/TestStringInterner.java

2012-10-23 Thread daryn
Author: daryn
Date: Tue Oct 23 21:01:03 2012
New Revision: 1401473

URL: http://svn.apache.org/viewvc?rev=1401473&view=rev
Log:
MAPREDUCE-4229. Intern counter names in the JT (bobby via daryn)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringInterner.java

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java?rev=1401473&view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
 Tue Oct 23 21:01:03 2012
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import com.google.common.collect.Interner;
+import com.google.common.collect.Interners;
+
+/**
+ * Provides equivalent behavior to String.intern() to optimize performance,
+ * whereby it does not consume memory in the permanent generation.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class StringInterner {
+  
+  /**
+   * Retains a strong reference to each string instance it has interned.
+   */
+  private final static Interner<String> strongInterner;
+  
+  /**
+   * Retains a weak reference to each string instance it has interned. 
+   */
+  private final static Interner<String> weakInterner;
+  
+  
+  
+  static {
+strongInterner = Interners.newStrongInterner();
+weakInterner = Interners.newWeakInterner();
+  }
+  
+  /**
+   * Interns and returns a reference to the representative instance 
+   * for any of a collection of string instances that are equal to each other.
+   * Retains strong reference to the instance, 
+   * thus preventing it from being garbage-collected. 
+   * 
+   * @param sample string instance to be interned
+   * @return strong reference to interned string instance
+   */
+  public static String strongIntern(String sample) {
+return strongInterner.intern(sample);
+  }
+  
+  /**
+   * Interns and returns a reference to the representative instance 
+   * for any of a collection of string instances that are equal to each other.
+   * Retains weak reference to the instance, 
+   * and so does not prevent it from being garbage-collected.
+   * 
+   * @param sample string instance to be interned
+   * @return weak reference to interned string instance
+   */
+  public static String weakIntern(String sample) {
+return weakInterner.intern(sample);
+  }
+
+}
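A brief usage sketch of the class above; the new String(...) calls merely
force distinct instances so the identity check is meaningful:

    import org.apache.hadoop.util.StringInterner;

    public class InternDemo {
      public static void main(String[] args) {
        // Two equal but distinct String instances...
        String a = StringInterner.weakIntern(new String("MAP_INPUT_RECORDS"));
        String b = StringInterner.weakIntern(new String("MAP_INPUT_RECORDS"));
        // ...collapse to one canonical instance. weakIntern still lets the
        // canonical copy be garbage-collected once nothing references it;
        // strongIntern pins it for the lifetime of the JVM instead.
        System.out.println(a == b); // true
      }
    }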

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringInterner.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringInterner.java?rev=1401473&view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringInterner.java
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringInterner.java
 Tue Oct 23 21:01:03 2012
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless

svn commit: r1397634 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

2012-10-12 Thread daryn
Author: daryn
Date: Fri Oct 12 16:27:26 2012
New Revision: 1397634

URL: http://svn.apache.org/viewvc?rev=1397634&view=rev
Log:
HADOOP-8784. Improve IPC.Client's token use (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1397634&r1=1397633&r2=1397634&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Oct 
12 16:27:26 2012
@@ -312,6 +312,8 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-8912. Add .gitattributes file to prevent CRLF and LF mismatches
 for source and text files. (Raja Aluri via suresh)
 
+HADOOP-8784. Improve IPC.Client's token use (daryn)
+
   OPTIMIZATIONS
 
 HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1397634&r1=1397633&r2=1397634&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Fri Oct 12 16:27:26 2012
@@ -225,7 +225,6 @@ public class Client {
 private IpcConnectionContextProto connectionContext;   // connection context
 private final ConnectionId remoteId;// connection id
 private AuthMethod authMethod; // authentication method
-private boolean useSasl;
 private Token<? extends TokenIdentifier> token;
 private SaslRpcClient saslRpcClient;
 
@@ -270,8 +269,7 @@ public class Client {
 
   UserGroupInformation ticket = remoteId.getTicket();
   Class<?> protocol = remoteId.getProtocol();
-  this.useSasl = UserGroupInformation.isSecurityEnabled();
-  if (useSasl && protocol != null) {
+  if (protocol != null) {
 TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol, conf);
 if (tokenInfo != null) {
   TokenSelector<? extends TokenIdentifier> tokenSelector = null;
@@ -296,12 +294,12 @@ public class Client {
 }
   }
   
-  if (!useSasl) {
-authMethod = AuthMethod.SIMPLE;
-  } else if (token != null) {
+  if (token != null) {
 authMethod = AuthMethod.DIGEST;
-  } else {
+  } else if (UserGroupInformation.isSecurityEnabled()) {
 authMethod = AuthMethod.KERBEROS;
+  } else {
+authMethod = AuthMethod.SIMPLE;
   }
   
   connectionContext = ProtoUtil.makeIpcConnectionContext(
@@ -576,14 +574,12 @@ public class Client {
   InputStream inStream = NetUtils.getInputStream(socket);
   OutputStream outStream = NetUtils.getOutputStream(socket);
   writeConnectionHeader(outStream);
-  if (useSasl) {
+  if (authMethod != AuthMethod.SIMPLE) {
 final InputStream in2 = inStream;
 final OutputStream out2 = outStream;
 UserGroupInformation ticket = remoteId.getTicket();
-if (authMethod == AuthMethod.KERBEROS) {
-  if (ticket.getRealUser() != null) {
-ticket = ticket.getRealUser();
-  }
+if (ticket.getRealUser() != null) {
+  ticket = ticket.getRealUser();
 }
 boolean continueSasl = false;
 try {
@@ -614,7 +610,6 @@ public class Client {
   connectionContext.getProtocol(), 
   ProtoUtil.getUgi(connectionContext.getUserInfo()),
   authMethod);
-  useSasl = false;
 }
   }
 
@@ -1174,7 +1169,7 @@ public class Client {
   call.error);
 }
   } else {
-return call.rpcResponse;
+return call.getRpcResult();
   }
 }
   }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1397634&r1=1397633&r2=1397634&view=diff
==
--- 
hadoop/common/trunk/hadoop-common
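The interesting part of the Client.java hunk above is the new precedence: a
token now selects DIGEST even when security is off. A hedged restatement of
that ordering with stand-in types (not the committed Client internals):

    public class AuthMethodSketch {
      enum AuthMethod { SIMPLE, DIGEST, KERBEROS }

      // Token present -> DIGEST; else KERBEROS when security is enabled;
      // otherwise SIMPLE. Mirrors the if/else chain in the hunk above.
      static AuthMethod choose(boolean hasToken, boolean securityEnabled) {
        if (hasToken) {
          return AuthMethod.DIGEST;
        } else if (securityEnabled) {
          return AuthMethod.KERBEROS;
        }
        return AuthMethod.SIMPLE;
      }

      public static void main(String[] args) {
        System.out.println(choose(true, false));  // DIGEST
        System.out.println(choose(false, true));  // KERBEROS
        System.out.println(choose(false, false)); // SIMPLE
      }
    }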

svn commit: r1393483 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Server.java src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

2012-10-03 Thread daryn
Author: daryn
Date: Wed Oct  3 13:43:53 2012
New Revision: 1393483

URL: http://svn.apache.org/viewvc?rev=1393483&view=rev
Log:
HADOOP-8783. Improve RPC.Server's digest auth (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1393483&r1=1393482&r2=1393483&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Oct 
 3 13:43:53 2012
@@ -288,6 +288,8 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-8851. Use -XX:+HeapDumpOnOutOfMemoryError JVM option in the forked
 tests. (Ivan A. Veselovsky via atm)
 
+HADOOP-8783. Improve RPC.Server's digest auth (daryn)
+
   OPTIMIZATIONS
 
 HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1393483&r1=1393482&r2=1393483&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Wed Oct  3 13:43:53 2012
@@ -87,7 +87,6 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
@@ -1374,20 +1373,38 @@ public abstract class Server {
   dataLengthBuffer.clear();
   if (authMethod == null) {
 throw new IOException("Unable to read authentication method");
-  }
-  if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
-    AccessControlException ae = new AccessControlException("Authorization ("
-      + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
-      + ") is enabled but authentication ("
-      + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
-      + ") is configured as simple. Please configure another method "
-      + "like kerberos or digest.");
-    setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
-        null, ae.getClass().getName(), ae.getMessage());
-    responder.doRespond(authFailedCall);
-    throw ae;
-  }
-  if (!isSecurityEnabled && authMethod != AuthMethod.SIMPLE) {
+  }  
+  final boolean clientUsingSasl;
+  switch (authMethod) {
+case SIMPLE: { // no sasl for simple
+  if (isSecurityEnabled) {
+AccessControlException ae = new AccessControlException("Authorization ("
++ CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
++ ") is enabled but authentication ("
++ CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
++ ") is configured as simple. Please configure another method "
++ "like kerberos or digest.");
+setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
+null, ae.getClass().getName(), ae.getMessage());
+responder.doRespond(authFailedCall);
+throw ae;
+  }
+  clientUsingSasl = false;
+  useSasl = false; 
+  break;
+}
+case DIGEST: {
+  clientUsingSasl = true;
+  useSasl = (secretManager != null);
+  break;
+}
+default: {
+  clientUsingSasl = true;
+  useSasl = isSecurityEnabled; 
+  break;
+}
+  }  
+  if (clientUsingSasl && !useSasl) {
 doSaslReply(SaslStatus.SUCCESS, new IntWritable(
 SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
 authMethod = AuthMethod.SIMPLE;
@@ -1396,9 +1413,6
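Condensed, the server-side switch above reduces to one question per auth
method: run SASL or not. A hedged helper-style restatement with stand-in
types, not the committed Server code:

    public class ServerSaslSketch {
      enum AuthMethod { SIMPLE, DIGEST, KERBEROS }

      // SIMPLE never negotiates; DIGEST needs a secret manager to validate
      // tokens; anything else (e.g. KERBEROS) follows the security setting.
      static boolean useSasl(AuthMethod clientMethod, boolean securityEnabled,
          boolean hasSecretManager) {
        switch (clientMethod) {
          case SIMPLE: return false;
          case DIGEST: return hasSecretManager;
          default:     return securityEnabled;
        }
      }

      public static void main(String[] args) {
        // A client that expected SASL but gets 'false' here is sent
        // SWITCH_TO_SIMPLE_AUTH so it can downgrade instead of stalling.
        System.out.println(useSasl(AuthMethod.DIGEST, true, false)); // false
      }
    }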

svn commit: r1393498 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Server.java src/test/java/org/apache/hadoop/ipc/TestSasl

2012-10-03 Thread daryn
Author: daryn
Date: Wed Oct  3 14:08:15 2012
New Revision: 1393498

URL: http://svn.apache.org/viewvc?rev=1393498&view=rev
Log:
HADOOP-8783. Improve RPC.Server's digest auth (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1393498&r1=1393497&r2=1393498&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Oct  3 14:08:15 2012
@@ -28,6 +28,8 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-8851. Use -XX:+HeapDumpOnOutOfMemoryError JVM option in the forked
 tests. (Ivan A. Veselovsky via atm)
 
+HADOOP-8783. Improve RPC.Server's digest auth (daryn)
+
   OPTIMIZATIONS
 
 HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1393498&r1=1393497&r2=1393498&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Wed Oct  3 14:08:15 2012
@@ -84,7 +84,6 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
@@ -1333,20 +1332,38 @@ public abstract class Server {
   dataLengthBuffer.clear();
   if (authMethod == null) {
 throw new IOException("Unable to read authentication method");
-  }
-  if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
-    AccessControlException ae = new AccessControlException("Authorization ("
-      + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
-      + ") is enabled but authentication ("
-      + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
-      + ") is configured as simple. Please configure another method "
-      + "like kerberos or digest.");
-    setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
-        null, ae.getClass().getName(), ae.getMessage());
-    responder.doRespond(authFailedCall);
-    throw ae;
-  }
-  if (!isSecurityEnabled && authMethod != AuthMethod.SIMPLE) {
+  }  
+  final boolean clientUsingSasl;
+  switch (authMethod) {
+case SIMPLE: { // no sasl for simple
+  if (isSecurityEnabled) {
+AccessControlException ae = new AccessControlException("Authorization ("
++ CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
++ ") is enabled but authentication ("
++ CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
++ ") is configured as simple. Please configure another method "
++ "like kerberos or digest.");
+setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
+null, ae.getClass().getName(), ae.getMessage());
+responder.doRespond(authFailedCall);
+throw ae;
+  }
+  clientUsingSasl = false;
+  useSasl = false; 
+  break;
+}
+case DIGEST: {
+  clientUsingSasl = true;
+  useSasl = (secretManager != null);
+  break;
+}
+default: {
+  clientUsingSasl = true;
+  useSasl = isSecurityEnabled; 
+  break;
+}
+  }  
+  if (clientUsingSasl && !useSasl) {
 doSaslReply(SaslStatus.SUCCESS, new IntWritable

svn commit: r1391150 - /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

2012-09-27 Thread daryn
Author: daryn
Date: Thu Sep 27 18:11:25 2012
New Revision: 1391150

URL: http://svn.apache.org/viewvc?rev=1391150&view=rev
Log:
HDFS-3922. namenode throws away blocks under construction on restart (Kihwal 
Lee via daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1391150&r1=1391149&r2=1391150&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Sep 27 18:11:25 2012
@@ -224,6 +224,9 @@ Release 0.23.3 - UNRELEASED
 HADOOP-8727. Gracefully deprecate dfs.umaskmode in 2.x onwards (Harsh J
 via bobby)
 
+HDFS-3922. namenode throws away blocks under construction on restart
+(Kihwal Lee via daryn)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES




svn commit: r1391155 - /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

2012-09-27 Thread daryn
Author: daryn
Date: Thu Sep 27 18:21:17 2012
New Revision: 1391155

URL: http://svn.apache.org/viewvc?rev=1391155&view=rev
Log:
HDFS-3731. 2.0 release upgrade must handle blocks being written from 1.0 
(Kihwal Lee via daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1391155&r1=1391154&r2=1391155&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Sep 27 18:21:17 2012
@@ -224,9 +224,6 @@ Release 0.23.3 - UNRELEASED
 HADOOP-8727. Gracefully deprecate dfs.umaskmode in 2.x onwards (Harsh J
 via bobby)
 
-HDFS-3922. namenode throws away blocks under construction on restart
-(Kihwal Lee via daryn)
-
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES




svn commit: r1376485 - /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java

2012-08-23 Thread daryn
Author: daryn
Date: Thu Aug 23 13:38:48 2012
New Revision: 1376485

URL: http://svn.apache.org/viewvc?rev=1376485&view=rev
Log:
HDFS-3841. Port HDFS-3835 to branch-0.23 (bobby via daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1376485&r1=1376484&r2=1376485&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
 Thu Aug 23 13:38:48 2012
@@ -103,6 +103,16 @@ extends AbstractDelegationTokenIdentifie
 }
   }
   
+  /**
+   * Reset all data structures and mutable state.
+   */
+  public synchronized void reset() {
+currentId = 0;
+allKeys.clear();
+delegationTokenSequenceNumber = 0;
+currentTokens.clear();
+  }
+  
   /** 
* Add a previously used master key to cache (when NN restarts), 
* should be called before activate().
@@ -180,7 +190,6 @@ extends AbstractDelegationTokenIdentifie
   
   @Override
   protected synchronized byte[] createPassword(TokenIdent identifier) {
-LOG.info("Creating password for identifier: "+identifier);
 int sequenceNum;
 long now = System.currentTimeMillis();
 sequenceNum = ++delegationTokenSequenceNumber;
@@ -188,6 +197,7 @@ extends AbstractDelegationTokenIdentifie
 identifier.setMaxDate(now + tokenMaxLifetime);
 identifier.setMasterKeyId(currentId);
 identifier.setSequenceNumber(sequenceNum);
+LOG.info("Creating password for identifier: " + identifier);
 byte[] password = createPassword(identifier.getBytes(), 
currentKey.getKey());
 currentTokens.put(identifier, new DelegationTokenInformation(now
 + tokenRenewInterval, password));




svn commit: r1375063 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/viewfs/ src/main/java/org/a

2012-08-20 Thread daryn
Author: daryn
Date: Mon Aug 20 15:33:54 2012
New Revision: 1375063

URL: http://svn.apache.org/viewvc?rev=1375063&view=rev
Log:
svn merge -c 1374346 FIXES: HADOOP-7967. Need generalized multi-token 
filesystem support (daryn)

Added:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java
  - copied unchanged from r1374346, 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java
Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1375063&r1=1375062&r2=1375063&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Mon Aug 20 15:33:54 2012
@@ -4,6 +4,8 @@ Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
 
+HADOOP-7967. Need generalized multi-token filesystem support (daryn)
+
   NEW FEATURES
 
   IMPROVEMENTS

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java?rev=1375063&r1=1375062&r2=1375063&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
 Mon Aug 20 15:33:54 2012
@@ -21,6 +21,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
 
@@ -217,6 +218,6 @@ public abstract class DelegateToFileSyst
   
   @Override //AbstractFileSystem
  public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
-return fsImpl.getDelegationTokens(renewer);
+return Arrays.asList(fsImpl.addDelegationTokens(renewer, null));
   }
 }
\ No newline at end of file

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1375063&r1=1375062&r2=1375063&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
 Mon Aug 20 15:33:54 2012
@@ -47,6 +47,7 @@ import org.apache.hadoop.conf.Configured
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.MultipleIOException;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
@@ -56,6 +57,8 @@ import org.apache.hadoop.util.Progressab
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager

svn commit: r1374271 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/viewfs/ src/main/java/org/apache/hadoop/se

2012-08-17 Thread daryn
Author: daryn
Date: Fri Aug 17 14:05:11 2012
New Revision: 1374271

URL: http://svn.apache.org/viewvc?rev=1374271&view=rev
Log:
HADOOP-7967. Need generalized multi-token filesystem support (daryn)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1374271&r1=1374270&r2=1374271&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Aug 
17 14:05:11 2012
@@ -806,6 +806,8 @@ Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
 
+HADOOP-7967. Need generalized multi-token filesystem support (daryn)
+
   NEW FEATURES
 
   IMPROVEMENTS

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java?rev=1374271&r1=1374270&r2=1374271&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
 Fri Aug 17 14:05:11 2012
@@ -21,6 +21,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
 
@@ -217,6 +218,6 @@ public abstract class DelegateToFileSyst
   
   @Override //AbstractFileSystem
  public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
-return fsImpl.getDelegationTokens(renewer);
+return Arrays.asList(fsImpl.addDelegationTokens(renewer, null));
   }
 }
\ No newline at end of file

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java?rev=1374271&r1=1374270&r2=1374271&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
 Fri Aug 17 14:05:11 2012
@@ -110,7 +110,11 @@ public class DelegationTokenRenewer<T ex
 fs.getRenewToken().renew(fs.getConf());
   } catch (IOException ie) {
 try {
-  fs.setDelegationToken(fs.getDelegationTokens(null).get(0));
+  Token<?>[] tokens = fs.addDelegationTokens(null, null);
+  if (tokens.length == 0) {
+throw new IOException("addDelegationTokens returned no tokens");
+  }
+  fs.setDelegationToken(tokens[0]);
 } catch (IOException ie2) {
   throw new IOException("Can't renew or get new delegation token ", ie);
 }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs
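The one-call replacement pattern in use, as a hedged sketch; the renewer name
is invented, and the Credentials cache is what lets multi-token filesystems
(such as viewfs mounts) skip tokens they already hold:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    public class TokenFetchSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Credentials creds = new Credentials();
        // Gathers tokens for this filesystem and any filesystems it
        // delegates to, adding them to creds and returning only the
        // newly acquired ones.
        Token<?>[] newTokens = fs.addDelegationTokens("yarn", creds);
        System.out.println("fetched " + newTokens.length + " new token(s)");
      }
    }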

svn commit: r1371724 - /hadoop/common/branches/branch-0.23/hadoop-project/pom.xml

2012-08-10 Thread daryn
Author: daryn
Date: Fri Aug 10 14:45:19 2012
New Revision: 1371724

URL: http://svn.apache.org/viewvc?rev=1371724&view=rev
Log:
HDFS-3187. Upgrade guava to 11.0.2. Contributed by Todd Lipcon.

Modified:
hadoop/common/branches/branch-0.23/hadoop-project/pom.xml

Modified: hadoop/common/branches/branch-0.23/hadoop-project/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-project/pom.xml?rev=1371724&r1=1371723&r2=1371724&view=diff
==
--- hadoop/common/branches/branch-0.23/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/branch-0.23/hadoop-project/pom.xml Fri Aug 10 
14:45:19 2012
@@ -266,7 +266,7 @@
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
-     <version>r09</version>
+     <version>11.0.2</version>
    </dependency>
    <dependency>
      <groupId>commons-cli</groupId>




svn commit: r1366440 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/security/token/delegation/ src/test/java/org/apache/hadoop/security/token/delegat

2012-07-27 Thread daryn
Author: daryn
Date: Fri Jul 27 16:39:51 2012
New Revision: 1366440

URL: http://svn.apache.org/viewvc?rev=1366440&view=rev
Log:
HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token auth 
type. (daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1366440&r1=1366439&r2=1366440&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Jul 
27 16:39:51 2012
@@ -854,6 +854,9 @@ Release 0.23.3 - UNRELEASED
 HADOOP-8551. fs -mkdir creates parent directories without the -p option
 (John George via bobby)
 
+HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token
+auth type. (daryn)
+
 Release 0.23.2 - UNRELEASED 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1366440&r1=1366439&r2=1366440&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 Fri Jul 27 16:39:51 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.HadoopKerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -88,14 +89,17 @@ extends TokenIdentifier {
 if ( (owner == null) || ("".equals(owner.toString())) ) {
   return null;
 }
+final UserGroupInformation realUgi;
+final UserGroupInformation ugi;
 if ((realUser == null) || ("".equals(realUser.toString()))
 || realUser.equals(owner)) {
-  return UserGroupInformation.createRemoteUser(owner.toString());
+  ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
 } else {
-  UserGroupInformation realUgi = UserGroupInformation
-  .createRemoteUser(realUser.toString());
-  return UserGroupInformation.createProxyUser(owner.toString(), realUgi);
+  realUgi = UserGroupInformation.createRemoteUser(realUser.toString());
+  ugi = UserGroupInformation.createProxyUser(owner.toString(), realUgi);
 }
+realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
+return ugi;
   }
 
   public Text getOwner() {

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java?rev=1366440&r1=1366439&r2=1366440&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 Fri Jul 27 16:39:51 2012
@@ -40,6 +40,8 @@ import org.apache.hadoop.io.DataOutputBu
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -172,6 +174,52 @@ public class TestDelegationToken {
   }
 
   @Test
+  public void testGetUserNullOwner
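A hedged sketch of the proxy-user case the patched getUser() now covers; the
user names are invented, and this mirrors, rather than calls, the committed
method:

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

    public class TokenUserSketch {
      public static void main(String[] args) throws Exception {
        // realUser != owner, so wrap the real user in a proxy UGI...
        UserGroupInformation realUgi =
            UserGroupInformation.createRemoteUser("oozie");
        UserGroupInformation ugi =
            UserGroupInformation.createProxyUser("alice", realUgi);
        // ...and mark the real user's UGI as TOKEN-authenticated, which is
        // the behavior HADOOP-8613 adds.
        realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
        System.out.println(
            ugi.getRealUser().getAuthenticationMethod()); // TOKEN
      }
    }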

svn commit: r1366450 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/security/token/delegation/ src/test/java/org/apache/hadoop/security/t

2012-07-27 Thread daryn
Author: daryn
Date: Fri Jul 27 16:53:38 2012
New Revision: 1366450

URL: http://svn.apache.org/viewvc?rev=1366450&view=rev
Log:
svn merge -c 1366440 FIXES: HADOOP-8613. 
AbstractDelegationTokenIdentifier#getUser() should set token auth type. (daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1366450&r1=1366449&r2=1366450&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Fri Jul 27 16:53:38 2012
@@ -673,6 +673,9 @@ Release 0.23.3 - UNRELEASED
 HADOOP-8551. fs -mkdir creates parent directories without the -p option
 (John George via bobby)
 
+HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token
+auth type. (daryn)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1366450&r1=1366449&r2=1366450&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 Fri Jul 27 16:53:38 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.HadoopKerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -88,14 +89,17 @@ extends TokenIdentifier {
 if ( (owner == null) || ("".equals(owner.toString())) ) {
   return null;
 }
+final UserGroupInformation realUgi;
+final UserGroupInformation ugi;
 if ((realUser == null) || ("".equals(realUser.toString()))
 || realUser.equals(owner)) {
-  return UserGroupInformation.createRemoteUser(owner.toString());
+  ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
 } else {
-  UserGroupInformation realUgi = UserGroupInformation
-  .createRemoteUser(realUser.toString());
-  return UserGroupInformation.createProxyUser(owner.toString(), realUgi);
+  realUgi = UserGroupInformation.createRemoteUser(realUser.toString());
+  ugi = UserGroupInformation.createProxyUser(owner.toString(), realUgi);
 }
+realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
+return ugi;
   }
 
   public Text getOwner() {

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java?rev=1366450&r1=1366449&r2=1366450&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 Fri Jul 27 16:53:38 2012
@@ -40,6 +40,8 @@ import org.apache.hadoop.io.DataOutputBu
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.Token;
 import

svn commit: r1366462 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/security/token/delegation/ src/test/java/org/apache/hadoop/securit

2012-07-27 Thread daryn
Author: daryn
Date: Fri Jul 27 17:14:30 2012
New Revision: 1366462

URL: http://svn.apache.org/viewvc?rev=1366462&view=rev
Log:
svn merge -c 1366440 FIXES: HADOOP-8613. 
AbstractDelegationTokenIdentifier#getUser() should set token auth type. (daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1366462&r1=1366461&r2=1366462&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Fri Jul 27 17:14:30 2012
@@ -120,6 +120,9 @@ Release 0.23.3 - UNRELEASED
 HADOOP-8551. fs -mkdir creates parent directories without the -p option
 (John George via bobby)
 
+HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token
+auth type. (daryn)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1366462&r1=1366461&r2=1366462&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 Fri Jul 27 17:14:30 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.HadoopKerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@@ -86,14 +87,17 @@ extends TokenIdentifier {
 if ( (owner == null) || ("".equals(owner.toString())) ) {
   return null;
 }
+final UserGroupInformation realUgi;
+final UserGroupInformation ugi;
 if ((realUser == null) || ("".equals(realUser.toString()))
 || realUser.equals(owner)) {
-  return UserGroupInformation.createRemoteUser(owner.toString());
+  ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
 } else {
-  UserGroupInformation realUgi = UserGroupInformation
-  .createRemoteUser(realUser.toString());
-  return UserGroupInformation.createProxyUser(owner.toString(), realUgi);
+  realUgi = UserGroupInformation.createRemoteUser(realUser.toString());
+  ugi = UserGroupInformation.createProxyUser(owner.toString(), realUgi);
 }
+realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
+return ugi;
   }
 
   public Text getOwner() {

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java?rev=1366462&r1=1366461&r2=1366462&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 Fri Jul 27 17:14:30 2012
@@ -39,6 +39,8 @@ import org.apache.hadoop.io.DataOutputBu
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.SecretManager;
 import

svn commit: r1366467 - in /hadoop/common/branches/branch-1: ./ src/core/org/apache/hadoop/security/token/delegation/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/test/org/apache/hadoop/securit

2012-07-27 Thread daryn
Author: daryn
Date: Fri Jul 27 17:21:25 2012
New Revision: 1366467

URL: http://svn.apache.org/viewvc?rev=1366467&view=rev
Log:
HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token auth 
type. (daryn)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1366467&r1=1366466&r2=1366467&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Fri Jul 27 17:21:25 2012
@@ -136,6 +136,9 @@ Release 1.2.0 - unreleased
 HDFS-3696. Set chunked streaming mode in WebHdfsFileSystem write operations
 to get around a Java library bug causing OutOfMemoryError.  (szetszwo)
 
+HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token
+auth type. (daryn)
+
 Release 1.1.0 - unreleased
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1366467&r1=1366466&r2=1366467&view=diff
==
--- 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 Fri Jul 27 17:21:25 2012
@@ -30,6 +30,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.KerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
 //@InterfaceAudience.LimitedPrivate({"HDFS", "MAPREDUCE"})
@@ -86,14 +87,17 @@ extends TokenIdentifier {
 if ( (owner == null) || ("".equals(owner.toString()))) {
   return null;
 }
+final UserGroupInformation realUgi;
+final UserGroupInformation ugi;
 if ((realUser == null) || ("".equals(realUser.toString()))
 || realUser.equals(owner)) {
-  return UserGroupInformation.createRemoteUser(owner.toString());
+  ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
 } else {
-  UserGroupInformation realUgi = UserGroupInformation
-  .createRemoteUser(realUser.toString());
-  return UserGroupInformation.createProxyUser(owner.toString(), realUgi);
+  realUgi = UserGroupInformation.createRemoteUser(realUser.toString());
+  ugi = UserGroupInformation.createProxyUser(owner.toString(), realUgi);
 }
+realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
+return ugi;
   }
 
   public Text getRenewer() {
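
As a hedged aside on why the proxy branch returns ugi but tags realUgi:
the effective user and the authenticating user are distinct UGIs. The
names below are invented:

  import org.apache.hadoop.security.UserGroupInformation;

  public class ProxyUserSketch {
    public static void main(String[] args) {
      UserGroupInformation real =
          UserGroupInformation.createRemoteUser("gatewayUser");  // invented
      UserGroupInformation proxy =
          UserGroupInformation.createProxyUser("alice", real);   // invented
      // The proxy UGI acts as the effective user...
      System.out.println(proxy.getUserName());                   // alice
      // ...while getRealUser() exposes who actually authenticated.
      System.out.println(proxy.getRealUser().getUserName());     // gatewayUser
    }
  }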

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=1366467&r1=1366466&r2=1366467&view=diff
==
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
 Fri Jul 27 17:21:25 2012
@@ -506,7 +506,6 @@ public class JspHelper {
   ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
 }
 ugi.addToken(token);
-ugi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
   } else {
 if(remoteUser == null) {
   throw new IOException("Security enabled but user not " +

Modified: 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java?rev=1366467&r1=1366466&r2=1366467&view=diff
==
--- 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
 Fri Jul

svn commit: r1366074 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/fs/Path.java test/java/org/apache/hadoop/fs/TestPath.java

2012-07-26 Thread daryn
Author: daryn
Date: Thu Jul 26 16:33:03 2012
New Revision: 1366074

URL: http://svn.apache.org/viewvc?rev=1366074&view=rev
Log:
svn merge -r 1365800:1365817 FIXES: HDFS-3626. Creating file with invalid path 
can corrupt edit log. Contributed by Todd Lipcon.

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java?rev=1366074&r1=1366073&r2=1366074&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
 Thu Jul 26 16:33:03 2012
@@ -139,7 +139,7 @@ public class Path implements Comparable 
* Construct a path from a URI
*/
   public Path(URI aUri) {
-uri = aUri;
+uri = aUri.normalize();
   }
   
   /** Construct a Path from components. */

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1366074&r1=1366073&r2=1366074&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
 Thu Jul 26 16:33:03 2012
@@ -61,7 +61,7 @@ public class TestPath extends TestCase {
 assertEquals(pathString, new Path(pathString).toString());
   }
 
-  public void testNormalize() {
+  public void testNormalize() throws URISyntaxException {
 assertEquals("", new Path(".").toString());
 assertEquals("..", new Path("..").toString());
 assertEquals("/", new Path("/").toString());
@@ -75,6 +75,8 @@ public class TestPath extends TestCase {
 assertEquals("foo", new Path("foo/").toString());
 assertEquals("foo", new Path("foo//").toString());
 assertEquals("foo/bar", new Path("foo//bar").toString());
+assertEquals("hdfs://foo/foo2/bar/baz/",
+new Path(new URI("hdfs://foo//foo2///bar/baz///")).toString());
 if (Path.WINDOWS) {
   assertEquals("c:/a/b", new Path("c:\\a\\b").toString());
 }
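
For illustration, a small hedged sketch of the normalization the new
test exercises; it reuses the test's own URI, the rest is boilerplate:

  import java.net.URI;
  import java.net.URISyntaxException;
  import org.apache.hadoop.fs.Path;

  public class PathNormalizeSketch {
    public static void main(String[] args) throws URISyntaxException {
      // URI.normalize(), now applied in the Path(URI) constructor,
      // collapses duplicate slashes before they can reach the edit log.
      Path p = new Path(new URI("hdfs://foo//foo2///bar/baz///"));
      System.out.println(p);  // hdfs://foo/foo2/bar/baz/ per the test above
    }
  }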




svn commit: r1359821 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifi

2012-07-10 Thread daryn
Author: daryn
Date: Tue Jul 10 18:20:19 2012
New Revision: 1359821

URL: http://svn.apache.org/viewvc?rev=1359821&view=rev
Log:
svn merge -c 1344970, 1308485 FIXES: HDFS-3486 and HADOOP-8242

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1359821&r1=1359820&r2=1359821&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Tue Jul 10 18:20:19 2012
@@ -25,6 +25,9 @@ Release 0.23.3 - UNRELEASED
 
 HADOOP-8525. Provide Improved Traceability for Configuration (bobby)
 
+HADOOP-8242. AbstractDelegationTokenIdentifier: add getter methods
+for owner and realuser. (Colin Patrick McCabe via eli)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1359821&r1=1359820&r2=1359821&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
 Tue Jul 10 18:20:19 2012
@@ -96,10 +96,18 @@ extends TokenIdentifier {
 }
   }
 
+  public Text getOwner() {
+return owner;
+  }
+
   public Text getRenewer() {
 return renewer;
   }
   
+  public Text getRealUser() {
+return realUser;
+  }
+  
   public void setIssueDate(long issueDate) {
 this.issueDate = issueDate;
   }
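
A brief hedged sketch of the new accessors in use; the identifier is
assumed to have been decoded from a token's identifier bytes elsewhere:

  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;

  public class IdentifierGetterSketch {
    static void describe(AbstractDelegationTokenIdentifier id) {
      Text owner = id.getOwner();        // added by this change
      Text renewer = id.getRenewer();    // pre-existing accessor
      Text realUser = id.getRealUser();  // added by this change
      System.out.println(owner + " via " + realUser
          + ", renewable by " + renewer);
    }
  }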




svn commit: r1359209 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/net/ src/test/java/org/apache/hadoop/net/ src/test/java/org/apache

2012-07-09 Thread daryn
Author: daryn
Date: Mon Jul  9 15:09:52 2012
New Revision: 1359209

URL: http://svn.apache.org/viewvc?rev=1359209&view=rev
Log:
HDFS-3591 Backport HDFS-3357 to branch-0.23.

Added:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputWrapper.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
  - copied unchanged from r1359200, 
hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1359209&r1=1359208&r2=1359209&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Mon Jul  9 15:09:52 2012
@@ -20,6 +20,9 @@ Release 0.23.3 - UNRELEASED
 
 HADOOP-8450. Remove src/test/system. (eli)
 
+HADOOP-8350. Improve NetUtils.getInputStream to return a stream which has
+a tunable timeout. (todd)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1359209&r1=1359208&r2=1359209&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
 Mon Jul  9 15:09:52 2012
@@ -371,53 +371,44 @@ public class NetUtils {
   }
   
   /**
-   * Same as getInputStream(socket, socket.getSoTimeout()).<br><br>
+   * Same as <code>getInputStream(socket, socket.getSoTimeout()).</code>
+   * <br><br>
* 
-   * From documentation for {@link #getInputStream(Socket, long)}:<br>
-   * Returns InputStream for the socket. If the socket has an associated
-   * SocketChannel then it returns a 
-   * {@link SocketInputStream} with the given timeout. If the socket does not
-   * have a channel, {@link Socket#getInputStream()} is returned. In the later
-   * case, the timeout argument is ignored and the timeout set with 
-   * {@link Socket#setSoTimeout(int)} applies for reads.<br><br>
-   *
-   * Any socket created using socket factories returned by {@link NetUtils},
-   * must use this interface instead of {@link Socket#getInputStream()}.
-   * 
* @see #getInputStream(Socket, long)
-   * 
-   * @param socket
-   * @return InputStream for reading from the socket.
-   * @throws IOException
*/
-  public static InputStream getInputStream(Socket socket) 
+  public static SocketInputWrapper getInputStream(Socket socket) 
throws IOException {
 return getInputStream(socket, socket.getSoTimeout());
   }
-  
+
   /**
-   * Returns InputStream for the socket. If the socket has an associated
-   * SocketChannel then it returns a 
-   * {@link SocketInputStream} with the given timeout. If the socket does not
-   * have a channel, {@link Socket#getInputStream()} is returned. In the later
-   * case, the timeout argument is ignored and the timeout set with 
-   * {@link Socket#setSoTimeout(int)} applies for reads.<br><br>
+   * Return a {@link SocketInputWrapper} for the socket and set the given
+   * timeout. If the socket does not have an associated channel, then its socket
+   * timeout will be set to the specified value. Otherwise, a
+   * {@link SocketInputStream} will be created which reads with the configured
+   * timeout.
* 
-   * Any socket created using socket factories returned by {@link NetUtils},
+   * Any socket created using socket
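
A hedged usage sketch of the new return type; the endpoint is invented,
and the setTimeout call is an assumption based on the SocketInputWrapper
introduced by HDFS-3357:

  import java.io.IOException;
  import java.net.Socket;
  import org.apache.hadoop.net.NetUtils;
  import org.apache.hadoop.net.SocketInputWrapper;

  public class TunableTimeoutRead {
    public static void main(String[] args) throws IOException {
      Socket socket = new Socket("host.example.com", 1004);  // invented
      try {
        // Callers now receive the wrapper type rather than a bare
        // InputStream, so the read timeout can be retuned later.
        SocketInputWrapper in = NetUtils.getInputStream(socket);
        in.setTimeout(60000);  // assumed setter from HDFS-3357
        System.out.println("first byte: " + in.read());
      } finally {
        socket.close();
      }
    }
  }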

svn commit: r1356897 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

2012-07-03 Thread daryn
Author: daryn
Date: Tue Jul  3 19:59:56 2012
New Revision: 1356897

URL: http://svn.apache.org/viewvc?rev=1356897&view=rev
Log:
HADOOP-8110. Fix trash checkpoint collisions (Jason Lowe via daryn)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1356897&r1=1356896&r2=1356897&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Jul 
 3 19:59:56 2012
@@ -167,6 +167,8 @@ Trunk (unreleased changes)
 HADOOP-8548. test-patch.sh shows an incorrect link in Jenkins builds
 (Kihwal Lee via bobby)
 
+HADOOP-8110. Fix trash checkpoint collisions (Jason Lowe via daryn)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1356897&r1=1356896&r2=1356897&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Tue Jul  3 19:59:56 2012
@@ -35,6 +35,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 
@@ -148,21 +149,32 @@ public class TrashPolicyDefault extends 
   new IOException("Failed to move to trash: "+path).initCause(cause);
   }
 
+  @SuppressWarnings("deprecation")
   @Override
   public void createCheckpoint() throws IOException {
 if (!fs.exists(current)) // no trash, no checkpoint
   return;
 
-Path checkpoint;
+Path checkpointBase;
 synchronized (CHECKPOINT) {
-  checkpoint = new Path(trash, CHECKPOINT.format(new Date()));
+  checkpointBase = new Path(trash, CHECKPOINT.format(new Date()));
 }
+Path checkpoint = checkpointBase;
 
-if (fs.rename(current, checkpoint)) {
-  LOG.info("Created trash checkpoint: "+checkpoint.toUri().getPath());
-} else {
-  throw new IOException("Failed to checkpoint trash: "+checkpoint);
+int attempt = 0;
+while (true) {
+  try {
+fs.rename(current, checkpoint, Rename.NONE);
+break;
+  } catch (FileAlreadyExistsException e) {
+if (++attempt > 1000) {
+  throw new IOException("Failed to checkpoint trash: "+checkpoint);
+}
+checkpoint = checkpointBase.suffix("-" + attempt);
+  }
 }
+
+LOG.info("Created trash checkpoint: "+checkpoint.toUri().getPath());
   }
 
   @Override
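
For illustration, a hedged sketch of how Path.suffix() produces the
collision-free names generated by the retry loop above; the checkpoint
path and timestamp are invented:

  import org.apache.hadoop.fs.Path;

  public class CheckpointSuffixSketch {
    public static void main(String[] args) {
      // When two checkpoints format to the same instant, the rename
      // throws FileAlreadyExistsException and the loop retries with a
      // numeric suffix, giving up after 1000 attempts.
      Path base = new Path("/user/alice/.Trash/120703195956");  // invented
      System.out.println(base.suffix("-1"));  // .../120703195956-1
      System.out.println(base.suffix("-2"));  // .../120703195956-2
    }
  }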




svn commit: r1356913 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

2012-07-03 Thread daryn
Author: daryn
Date: Tue Jul  3 20:24:07 2012
New Revision: 1356913

URL: http://svn.apache.org/viewvc?rev=1356913&view=rev
Log:
HADOOP-8110. Fix trash checkpoint collisions (Jason Lowe via daryn)

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1356913&r1=1356912&r2=1356913&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Tue Jul  3 20:24:07 2012
@@ -84,6 +84,8 @@ Release 0.23.3 - UNRELEASED
 HADOOP-8393. hadoop-config.sh missing variable exports, causes Yarn jobs to
 fail with ClassNotFoundException MRAppMaster. (phunt via tucu)
 
+HADOOP-8110. Fix trash checkpoint collisions (Jason Lowe via daryn)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1356913&r1=1356912&r2=1356913&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Tue Jul  3 20:24:07 2012
@@ -35,6 +35,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 
@@ -148,21 +149,32 @@ public class TrashPolicyDefault extends 
   new IOException("Failed to move to trash: "+path).initCause(cause);
   }
 
+  @SuppressWarnings("deprecation")
   @Override
   public void createCheckpoint() throws IOException {
 if (!fs.exists(current)) // no trash, no checkpoint
   return;
 
-Path checkpoint;
+Path checkpointBase;
 synchronized (CHECKPOINT) {
-  checkpoint = new Path(trash, CHECKPOINT.format(new Date()));
+  checkpointBase = new Path(trash, CHECKPOINT.format(new Date()));
 }
+Path checkpoint = checkpointBase;
 
-if (fs.rename(current, checkpoint)) {
-  LOG.info("Created trash checkpoint: "+checkpoint.toUri().getPath());
-} else {
-  throw new IOException("Failed to checkpoint trash: "+checkpoint);
+int attempt = 0;
+while (true) {
+  try {
+fs.rename(current, checkpoint, Rename.NONE);
+break;
+  } catch (FileAlreadyExistsException e) {
+if (++attempt > 1000) {
+  throw new IOException("Failed to checkpoint trash: "+checkpoint);
+}
+checkpoint = checkpointBase.suffix("-" + attempt);
+  }
 }
+
+LOG.info("Created trash checkpoint: "+checkpoint.toUri().getPath());
   }
 
   @Override




svn commit: r1356918 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

2012-07-03 Thread daryn
Author: daryn
Date: Tue Jul  3 20:28:30 2012
New Revision: 1356918

URL: http://svn.apache.org/viewvc?rev=1356918&view=rev
Log:
HADOOP-8110. Fix trash checkpoint collisions (Jason Lowe via daryn)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1356918&r1=1356917&r2=1356918&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Jul  3 20:28:30 2012
@@ -550,6 +550,8 @@ Release 0.23.3 - UNRELEASED
 
 HADOOP-8535. Cut hadoop build times in half (Jon Eagles via bobby)
 
+HADOOP-8110. Fix trash checkpoint collisions (Jason Lowe via daryn)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1356918&r1=1356917&r2=1356918&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
 Tue Jul  3 20:28:30 2012
@@ -35,6 +35,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 
@@ -148,21 +149,32 @@ public class TrashPolicyDefault extends 
   new IOException("Failed to move to trash: "+path).initCause(cause);
   }
 
+  @SuppressWarnings("deprecation")
   @Override
   public void createCheckpoint() throws IOException {
 if (!fs.exists(current)) // no trash, no checkpoint
   return;
 
-Path checkpoint;
+Path checkpointBase;
 synchronized (CHECKPOINT) {
-  checkpoint = new Path(trash, CHECKPOINT.format(new Date()));
+  checkpointBase = new Path(trash, CHECKPOINT.format(new Date()));
 }
+Path checkpoint = checkpointBase;
 
-if (fs.rename(current, checkpoint)) {
-  LOG.info("Created trash checkpoint: "+checkpoint.toUri().getPath());
-} else {
-  throw new IOException("Failed to checkpoint trash: "+checkpoint);
+int attempt = 0;
+while (true) {
+  try {
+fs.rename(current, checkpoint, Rename.NONE);
+break;
+  } catch (FileAlreadyExistsException e) {
+if (++attempt > 1000) {
+  throw new IOException("Failed to checkpoint trash: "+checkpoint);
+}
+checkpoint = checkpointBase.suffix("-" + attempt);
+  }
 }
+
+LOG.info("Created trash checkpoint: "+checkpoint.toUri().getPath());
   }
 
   @Override