svn commit: r1539132 - in /hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls: nodemanager/NodeInfo.java scheduler/RMNodeWrapper.java

2013-11-05 Thread llu
Author: llu
Date: Tue Nov  5 21:20:05 2013
New Revision: 1539132

URL: http://svn.apache.org/r1539132
Log:
YARN-311. RM/scheduler support for dynamic resource configuration. (Junping Du 
via llu)

Modified:

hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java?rev=1539132&r1=1539131&r2=1539132&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java
 Tue Nov  5 21:20:05 2013
@@ -30,6 +30,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceOption;
 import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode
@@ -48,7 +49,7 @@ public class NodeInfo {
 private String nodeAddr;
 private String httpAddress;
 private int cmdPort;
-private Resource perNode;
+private volatile ResourceOption perNode;
 private String rackName;
 private String healthReport;
 private NodeState state;
@@ -56,7 +57,7 @@ public class NodeInfo {
 private List<ApplicationId> toCleanUpApplications;
 
 public FakeRMNodeImpl(NodeId nodeId, String nodeAddr, String httpAddress,
-Resource perNode, String rackName, String healthReport,
+ResourceOption perNode, String rackName, String healthReport,
 int cmdPort, String hostName, NodeState state) {
   this.nodeId = nodeId;
   this.nodeAddr = nodeAddr;
@@ -104,6 +105,10 @@ public class NodeInfo {
 }
 
 public Resource getTotalCapability() {
+  return perNode.getResource();
+}
+
+public ResourceOption getResourceOption() {
   return perNode;
 }
 
@@ -153,21 +158,27 @@ public class NodeInfo {
// TODO Auto-generated method stub
return null;
}
+
+@Override
+public void setResourceOption(ResourceOption resourceOption) {
+  perNode = resourceOption;
+}
   }
   
   public static RMNode newNodeInfo(String rackName, String hostName,
-  final Resource resource, int port) {
+  final ResourceOption resourceOption, int port) {
 final NodeId nodeId = newNodeID(hostName, port);
 final String nodeAddr = hostName + ":" + port;
 final String httpAddress = hostName;
 
 return new FakeRMNodeImpl(nodeId, nodeAddr, httpAddress,
-resource, rackName, "Me good",
+resourceOption, rackName, "Me good",
 port, hostName, null);
   }
   
   public static RMNode newNodeInfo(String rackName, String hostName,
   final Resource resource) {
-return newNodeInfo(rackName, hostName, resource, NODE_ID++);
+return newNodeInfo(rackName, hostName, ResourceOption.newInstance(resource,
+RMNode.OVER_COMMIT_TIMEOUT_MILLIS_DEFAULT), NODE_ID++);
   }
 }

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java?rev=1539132&r1=1539131&r2=1539132&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java
 Tue Nov  5 21:20:05 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceOption;
 import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import

svn commit: r1539134 - in /hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls: nodemanager/NodeInfo.java scheduler/RMNodeWrapper.java

2013-11-05 Thread llu
Author: llu
Date: Tue Nov  5 21:23:53 2013
New Revision: 1539134

URL: http://svn.apache.org/r1539134
Log:
YARN-311. RM/scheduler support for dynamic resource configuration. (Junping Du 
via llu)

Modified:

hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java

hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java?rev=1539134&r1=1539133&r2=1539134&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java
 Tue Nov  5 21:23:53 2013
@@ -30,6 +30,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceOption;
 import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode
@@ -48,7 +49,7 @@ public class NodeInfo {
 private String nodeAddr;
 private String httpAddress;
 private int cmdPort;
-private Resource perNode;
+private volatile ResourceOption perNode;
 private String rackName;
 private String healthReport;
 private NodeState state;
@@ -56,7 +57,7 @@ public class NodeInfo {
 private List<ApplicationId> toCleanUpApplications;
 
 public FakeRMNodeImpl(NodeId nodeId, String nodeAddr, String httpAddress,
-Resource perNode, String rackName, String healthReport,
+ResourceOption perNode, String rackName, String healthReport,
 int cmdPort, String hostName, NodeState state) {
   this.nodeId = nodeId;
   this.nodeAddr = nodeAddr;
@@ -104,6 +105,10 @@ public class NodeInfo {
 }
 
 public Resource getTotalCapability() {
+  return perNode.getResource();
+}
+
+public ResourceOption getResourceOption() {
   return perNode;
 }
 
@@ -153,21 +158,27 @@ public class NodeInfo {
// TODO Auto-generated method stub
return null;
}
+
+@Override
+public void setResourceOption(ResourceOption resourceOption) {
+  perNode = resourceOption;
+}
   }
   
   public static RMNode newNodeInfo(String rackName, String hostName,
-  final Resource resource, int port) {
+  final ResourceOption resourceOption, int port) {
 final NodeId nodeId = newNodeID(hostName, port);
 final String nodeAddr = hostName + ":" + port;
 final String httpAddress = hostName;
 
 return new FakeRMNodeImpl(nodeId, nodeAddr, httpAddress,
-resource, rackName, "Me good",
+resourceOption, rackName, "Me good",
 port, hostName, null);
   }
   
   public static RMNode newNodeInfo(String rackName, String hostName,
   final Resource resource) {
-return newNodeInfo(rackName, hostName, resource, NODE_ID++);
+return newNodeInfo(rackName, hostName, ResourceOption.newInstance(resource,
+RMNode.OVER_COMMIT_TIMEOUT_MILLIS_DEFAULT), NODE_ID++);
   }
 }

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java?rev=1539134&r1=1539133&r2=1539134&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java
 Tue Nov  5 21:23:53 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceOption;
 import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode
@@ -138,10 +139,19 @@ public class RMNodeWrapper implements RM
 return updates;
   }
 
-@Override
-public

svn commit: r1531501 - /hadoop/common/trunk/.gitattributes

2013-10-11 Thread llu
Author: llu
Date: Sat Oct 12 04:55:06 2013
New Revision: 1531501

URL: http://svn.apache.org/r1531501
Log:
Workaround git eol hell

Modified:
hadoop/common/trunk/.gitattributes

Modified: hadoop/common/trunk/.gitattributes
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/.gitattributes?rev=1531501&r1=1531500&r2=1531501&view=diff
==
--- hadoop/common/trunk/.gitattributes (original)
+++ hadoop/common/trunk/.gitattributes Sat Oct 12 04:55:06 2013
@@ -13,7 +13,8 @@
 
 *.sh text eol=lf
 
-*.bat    text eol=crlf
-*.cmd    text eol=crlf
-*.csproj text merge=union eol=crlf
-*.sln    text merge=union eol=crlf
+*.bat    binary
+*.cmd    binary
+*.vcxproj binary
+*.csproj binary
+*.sln    binary




svn commit: r1527650 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/util/ReflectionUtils.java

2013-09-30 Thread llu
Author: llu
Date: Mon Sep 30 17:01:26 2013
New Revision: 1527650

URL: http://svn.apache.org/r1527650
Log:
HADOOP-9964. Fix deadlocks in TestHttpServer by synchronize 
ReflectionUtils.printThreadInfo. (Junping Du via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1527650&r1=1527649&r2=1527650&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Sep 
30 17:01:26 2013
@@ -348,6 +348,9 @@ Release 2.3.0 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-9964. Fix deadlocks in TestHttpServer by synchronize
+ReflectionUtils.printThreadInfo. (Junping Du via llu)
+
 HADOOP-9582. Non-existent file to hadoop fs -conf doesn't throw error
 (Ashwin Shankar via jlowe)
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java?rev=1527650r1=1527649r2=1527650view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
 Mon Sep 30 17:01:26 2013
@@ -154,7 +154,7 @@ public class ReflectionUtils {
* @param stream the stream to
* @param title a string title for the stack trace
*/
-  public static void printThreadInfo(PrintWriter stream,
+  public synchronized static void printThreadInfo(PrintWriter stream,
  String title) {
 final int STACK_DEPTH = 20;
 boolean contention = threadBean.isThreadContentionMonitoringEnabled();




svn commit: r1519973 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/test/java/org/apache/hadoop/ipc/TestIPC.java

2013-09-04 Thread llu
Author: llu
Date: Wed Sep  4 10:34:28 2013
New Revision: 1519973

URL: http://svn.apache.org/r1519973
Log:
HADOOP-9916. Fix race in ipc.Client retry. (Binglin Chang via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1519973r1=1519972r2=1519973view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Sep 
 4 10:34:28 2013
@@ -406,6 +406,8 @@ Release 2.1.1-beta - UNRELEASED
 
   BUG FIXES
 
+HADOOP-9916. Fix race in ipc.Client retry. (Binglin Chang via llu)
+
 HADOOP-9768. chown and chgrp reject users and groups with spaces on 
platforms
 where spaces are otherwise acceptable. (cnauroth)
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1519973r1=1519972r2=1519973view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Wed Sep  4 10:34:28 2013
@@ -1063,8 +1063,8 @@ public class Client {
 if (status == RpcStatusProto.SUCCESS) {
   Writable value = ReflectionUtils.newInstance(valueClass, conf);
   value.readFields(in); // read value
-  call.setRpcResponse(value);
   calls.remove(callId);
+  call.setRpcResponse(value);
   
   // verify that length was correct
   // only for ProtobufEngine where len can be verified easily
@@ -1098,8 +1098,8 @@ public class Client {
   new RemoteException(exceptionClassName, errorMsg) :
   new RemoteException(exceptionClassName, errorMsg, erCode));
   if (status == RpcStatusProto.ERROR) {
-call.setException(re);
 calls.remove(callId);
+call.setException(re);
   } else if (status == RpcStatusProto.FATAL) {
 // Close the connection
 markClosed(re);
@@ -1166,8 +1166,8 @@ public class Client {
   Iterator<Entry<Integer, Call>> itor = calls.entrySet().iterator() ;
   while (itor.hasNext()) {
 Call c = itor.next().getValue(); 
+itor.remove();
 c.setException(closeException); // local exception
-itor.remove(); 
   }
 }
   }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java?rev=1519973r1=1519972r2=1519973view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
 Wed Sep  4 10:34:28 2013
@@ -216,13 +216,13 @@ public class TestIPC {
 }
   }
   
-  @Test
+  @Test(timeout=60000)
   public void testSerial() throws IOException, InterruptedException {
-testSerial(3, false, 2, 5, 100);
-testSerial(3, true, 2, 5, 10);
+internalTestSerial(3, false, 2, 5, 100);
+internalTestSerial(3, true, 2, 5, 10);
   }
 
-  public void testSerial(int handlerCount, boolean handlerSleep, 
+  public void internalTestSerial(int handlerCount, boolean handlerSleep,
  int clientCount, int callerCount, int callCount)
 throws IOException, InterruptedException {
 Server server = new TestServer(handlerCount, handlerSleep);
@@ -249,7 +249,7 @@ public class TestIPC {
 server.stop();
   }

-  @Test
+  @Test(timeout=60000)
   public void testStandAloneClient() throws IOException {
 Client client = new Client(LongWritable.class, conf);
 InetSocketAddress address = new InetSocketAddress(127.0.0.1, 10);
@@ -383,7 +383,7 @@ public class TestIPC {
 }
   }
 
-  @Test
+  @Test(timeout=60000)
   public void testIOEOnClientWriteParam() throws Exception {
 doErrorTest(IOEOnWriteWritable.class,
 LongWritable.class,
@@ -391,7 +391,7 @@ public class

svn commit: r1519974 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/test/java/org/apache/hadoop/ipc/TestIPC.

2013-09-04 Thread llu
Author: llu
Date: Wed Sep  4 10:34:56 2013
New Revision: 1519974

URL: http://svn.apache.org/r1519974
Log:
HADOOP-9916. Fix race in ipc.Client retry. (Binglin Chang via llu)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1519974r1=1519973r2=1519974view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Sep  4 10:34:56 2013
@@ -127,6 +127,8 @@ Release 2.1.1-beta - UNRELEASED
 
   BUG FIXES
 
+HADOOP-9916. Fix race in ipc.Client retry. (Binglin Chang via llu)
+
 HADOOP-9768. chown and chgrp reject users and groups with spaces on 
platforms
 where spaces are otherwise acceptable. (cnauroth)
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1519974r1=1519973r2=1519974view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Wed Sep  4 10:34:56 2013
@@ -1063,8 +1063,8 @@ public class Client {
 if (status == RpcStatusProto.SUCCESS) {
   Writable value = ReflectionUtils.newInstance(valueClass, conf);
   value.readFields(in); // read value
-  call.setRpcResponse(value);
   calls.remove(callId);
+  call.setRpcResponse(value);
   
   // verify that length was correct
   // only for ProtobufEngine where len can be verified easily
@@ -1098,8 +1098,8 @@ public class Client {
   new RemoteException(exceptionClassName, errorMsg) :
   new RemoteException(exceptionClassName, errorMsg, erCode));
   if (status == RpcStatusProto.ERROR) {
-call.setException(re);
 calls.remove(callId);
+call.setException(re);
   } else if (status == RpcStatusProto.FATAL) {
 // Close the connection
 markClosed(re);
@@ -1166,8 +1166,8 @@ public class Client {
   Iterator<Entry<Integer, Call>> itor = calls.entrySet().iterator() ;
   while (itor.hasNext()) {
 Call c = itor.next().getValue(); 
+itor.remove();
 c.setException(closeException); // local exception
-itor.remove(); 
   }
 }
   }

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java?rev=1519974r1=1519973r2=1519974view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
 Wed Sep  4 10:34:56 2013
@@ -216,13 +216,13 @@ public class TestIPC {
 }
   }
   
-  @Test
+  @Test(timeout=60000)
   public void testSerial() throws IOException, InterruptedException {
-testSerial(3, false, 2, 5, 100);
-testSerial(3, true, 2, 5, 10);
+internalTestSerial(3, false, 2, 5, 100);
+internalTestSerial(3, true, 2, 5, 10);
   }
 
-  public void testSerial(int handlerCount, boolean handlerSleep, 
+  public void internalTestSerial(int handlerCount, boolean handlerSleep,
  int clientCount, int callerCount, int callCount)
 throws IOException, InterruptedException {
 Server server = new TestServer(handlerCount, handlerSleep);
@@ -249,7 +249,7 @@ public class TestIPC {
 server.stop();
   }

-  @Test
+  @Test(timeout=60000)
   public void testStandAloneClient() throws IOException {
 Client client = new Client(LongWritable.class, conf);
 InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10);
@@ -383,7 +383,7 @@ public class TestIPC {
 }
   }
 
-  @Test
+  @Test

svn commit: r1516130 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/http/ src/test/java/org/apache/hadoop/http/ src/test/java/org/apache/

2013-08-21 Thread llu
Author: llu
Date: Wed Aug 21 10:13:17 2013
New Revision: 1516130

URL: http://svn.apache.org/r1516130
Log:
HADOOP-9784. Add a builder for HttpServer. (Junping Du via llu)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1516130r1=1516129r2=1516130view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Aug 21 10:13:17 2013
@@ -8,6 +8,8 @@ Release 2.3.0 - UNRELEASED
 
   IMPROVEMENTS
 
+HADOOP-9784. Add a builder for HttpServer. (Junping Du via llu)
+
 HADOOP 9871. Fix intermittent findbugs warnings in DefaultMetricsSystem.
 (Junping Du via llu)
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1516130r1=1516129r2=1516130view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 Wed Aug 21 10:13:17 2013
@@ -46,6 +46,7 @@ import javax.servlet.http.HttpServletRes
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.ConfServlet;
@@ -118,18 +119,117 @@ public class HttpServer implements Filte
  protected final Map<Context, Boolean> defaultContexts =
  new HashMap<Context, Boolean>();
  protected final List<String> filterNames = new ArrayList<String>();
-  private static final int MAX_RETRIES = 10;
  static final String STATE_DESCRIPTION_ALIVE = " - alive";
  static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
 
   private final boolean listenerStartedExternally;
   
+  /**
+   * Class to construct instances of HTTP server with specific options.
+   */
+  public static class Builder {
+String name;
+String bindAddress;
+Integer port;
+Boolean findPort;
+Configuration conf;
+Connector connector;
+String[] pathSpecs;
+AccessControlList adminsAcl;
+boolean securityEnabled = false;
+String usernameConfKey = null;
+String keytabConfKey = null;
+
+public Builder setName(String name){
+  this.name = name;
+  return this;
+}
+
+public Builder setBindAddress(String bindAddress){
+  this.bindAddress = bindAddress;
+  return this;
+}
+
+public Builder setPort(int port) {
+  this.port = port;
+  return this;
+}
+
+public Builder setFindPort(boolean findPort) {
+  this.findPort = findPort;
+  return this;
+}
+
+public Builder setConf(Configuration conf) {
+  this.conf = conf;
+  return this;
+}
+
+public Builder setConnector(Connector connector) {
+  this.connector = connector;
+  return this;
+}
+
+public Builder setPathSpec(String[] pathSpec) {
+  this.pathSpecs = pathSpec;
+  return this;
+}
+
+public Builder setACL(AccessControlList acl) {
+  this.adminsAcl = acl;
+  return this;
+}
+
+public Builder setSecurityEnabled(boolean securityEnabled) {
+  this.securityEnabled = securityEnabled;
+  return this;
+}
+
+public Builder setUsernameConfKey(String usernameConfKey) {
+  this.usernameConfKey = usernameConfKey;
+  return this;
+}
+
+public Builder setKeytabConfKey(String keytabConfKey) {
+  this.keytabConfKey = keytabConfKey;
+  return this;
+}
+
+public HttpServer build() throws IOException {
+  if (this.name == null) {
+throw new HadoopIllegalArgumentException(name

svn commit: r1516128 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/http/ src/test/java/org/apache/hadoop/http/ src/test/java/org/apache/hadoop/log/

2013-08-21 Thread llu
Author: llu
Date: Wed Aug 21 10:12:13 2013
New Revision: 1516128

URL: http://svn.apache.org/r1516128
Log:
HADOOP-9784. Add a builder for HttpServer. (Junping Du via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1516128r1=1516127r2=1516128view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Aug 
21 10:12:13 2013
@@ -291,6 +291,8 @@ Release 2.3.0 - UNRELEASED
 
   IMPROVEMENTS
 
+HADOOP-9784. Add a builder for HttpServer. (Junping Du via llu)
+
 HADOOP 9871. Fix intermittent findbugs warnings in DefaultMetricsSystem.
 (Junping Du via llu)
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1516128r1=1516127r2=1516128view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 Wed Aug 21 10:12:13 2013
@@ -47,6 +47,7 @@ import javax.servlet.http.HttpServletRes
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.ConfServlet;
@@ -119,18 +120,117 @@ public class HttpServer implements Filte
   protected final Map<Context, Boolean> defaultContexts =
   new HashMap<Context, Boolean>();
   protected final List<String> filterNames = new ArrayList<String>();
-  private static final int MAX_RETRIES = 10;
   static final String STATE_DESCRIPTION_ALIVE = " - alive";
   static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
 
   private final boolean listenerStartedExternally;
   
+  /**
+   * Class to construct instances of HTTP server with specific options.
+   */
+  public static class Builder {
+String name;
+String bindAddress;
+Integer port;
+Boolean findPort;
+Configuration conf;
+Connector connector;
+String[] pathSpecs;
+AccessControlList adminsAcl;
+boolean securityEnabled = false;
+String usernameConfKey = null;
+String keytabConfKey = null;
+
+public Builder setName(String name){
+  this.name = name;
+  return this;
+}
+
+public Builder setBindAddress(String bindAddress){
+  this.bindAddress = bindAddress;
+  return this;
+}
+
+public Builder setPort(int port) {
+  this.port = port;
+  return this;
+}
+
+public Builder setFindPort(boolean findPort) {
+  this.findPort = findPort;
+  return this;
+}
+
+public Builder setConf(Configuration conf) {
+  this.conf = conf;
+  return this;
+}
+
+public Builder setConnector(Connector connector) {
+  this.connector = connector;
+  return this;
+}
+
+public Builder setPathSpec(String[] pathSpec) {
+  this.pathSpecs = pathSpec;
+  return this;
+}
+
+public Builder setACL(AccessControlList acl) {
+  this.adminsAcl = acl;
+  return this;
+}
+
+public Builder setSecurityEnabled(boolean securityEnabled) {
+  this.securityEnabled = securityEnabled;
+  return this;
+}
+
+public Builder setUsernameConfKey(String usernameConfKey) {
+  this.usernameConfKey = usernameConfKey;
+  return this;
+}
+
+public Builder setKeytabConfKey(String keytabConfKey) {
+  this.keytabConfKey = keytabConfKey;
+  return this;
+}
+
+public HttpServer build() throws IOException {
+  if (this.name == null) {
+throw new HadoopIllegalArgumentException("name is not set");
+  }
+  if (this.bindAddress == null) {
+throw new HadoopIllegalArgumentException(bindAddress is not set

svn commit: r1514024 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java

2013-08-14 Thread llu
Author: llu
Date: Wed Aug 14 20:23:07 2013
New Revision: 1514024

URL: http://svn.apache.org/r1514024
Log:
HADOOP 9871. Fix intermittent findbugs warnings in DefaultMetricsSystem. 
(Junping Du via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1514024r1=1514023r2=1514024view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Aug 
14 20:23:07 2013
@@ -293,6 +293,9 @@ Release 2.3.0 - UNRELEASED
 
   IMPROVEMENTS
 
+HADOOP-9871. Fix intermittent findbugs warnings in DefaultMetricsSystem.
+(Junping Du via llu)
+
 HADOOP-9319. Update bundled LZ4 source to r99. (Binglin Chang via llu)
 
 HADOOP-9241. DU refresh interval is not configurable (harsh)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java?rev=1514024&r1=1514023&r2=1514024&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
 Wed Aug 14 20:23:07 2013
@@ -46,8 +46,8 @@ public enum DefaultMetricsSystem {
   @VisibleForTesting
   volatile boolean miniClusterMode = false;
   
-  final UniqueNames mBeanNames = new UniqueNames();
-  final UniqueNames sourceNames = new UniqueNames();
+  transient final UniqueNames mBeanNames = new UniqueNames();
+  transient final UniqueNames sourceNames = new UniqueNames();
 
   /**
* Convenience method to initialize the metrics system




svn commit: r1514025 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java

2013-08-14 Thread llu
Author: llu
Date: Wed Aug 14 20:23:32 2013
New Revision: 1514025

URL: http://svn.apache.org/r1514025
Log:
HADOOP-9871. Fix intermittent findbugs warnings in DefaultMetricsSystem. 
(Junping Du via llu)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1514025r1=1514024r2=1514025view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Aug 14 20:23:32 2013
@@ -8,6 +8,9 @@ Release 2.3.0 - UNRELEASED
 
   IMPROVEMENTS
 
+HADOOP-9871. Fix intermittent findbugs warnings in DefaultMetricsSystem.
+(Junping Du via llu)
+
 HADOOP-9319. Update bundled LZ4 source to r99. (Binglin Chang via llu)
 
 HADOOP-9432 Add support for markdown .md files in site documentation 
(stevel)

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java?rev=1514025r1=1514024r2=1514025view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
 Wed Aug 14 20:23:32 2013
@@ -38,8 +38,8 @@ public enum DefaultMetricsSystem {
   private AtomicReference<MetricsSystem> impl =
   new AtomicReference<MetricsSystem>(new MetricsSystemImpl());
   volatile boolean miniClusterMode = false;
-  final UniqueNames mBeanNames = new UniqueNames();
-  final UniqueNames sourceNames = new UniqueNames();
+  transient final UniqueNames mBeanNames = new UniqueNames();
+  transient final UniqueNames sourceNames = new UniqueNames();
 
   /**
* Convenience method to initialize the metrics system




svn commit: r1513687 - in /hadoop/common/trunk/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/main/java/org/apache/hadoop/security/a

2013-08-13 Thread llu
Author: llu
Date: Tue Aug 13 23:12:20 2013
New Revision: 1513687

URL: http://svn.apache.org/r1513687
Log:
HADOOP-9446. Support Kerberos SPNEGO for IBM JDK. (Yu Gao via llu)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/

hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java
  - copied, changed from r1513684, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
Removed:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java

hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java

hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java?rev=1513687r1=1513686r2=1513687view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 Tue Aug 13 23:12:20 2013
@@ -37,6 +37,8 @@ import java.security.PrivilegedException
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
+
 /**
  * The {@link KerberosAuthenticator} implements the Kerberos SPNEGO 
authentication sequence.
  * <p/>
@@ -75,15 +77,31 @@ public class KerberosAuthenticator imple
 
 private static final String OS_LOGIN_MODULE_NAME;
 private static final boolean windows = 
System.getProperty("os.name").startsWith("Windows");
+private static final boolean is64Bit = 
System.getProperty("os.arch").contains("64");
+private static final boolean aix = 
System.getProperty("os.name").equals("AIX");
 
-static {
-  if (windows) {
-OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.NTLoginModule";
+/* Return the OS login module class name */
+private static String getOSLoginModuleName() {
+  if (IBM_JAVA) {
+if (windows) {
+  return is64Bit ? "com.ibm.security.auth.module.Win64LoginModule"
+  : "com.ibm.security.auth.module.NTLoginModule";
+} else if (aix) {
+  return is64Bit ? "com.ibm.security.auth.module.AIX64LoginModule"
+  : "com.ibm.security.auth.module.AIXLoginModule";
+} else {
+  return "com.ibm.security.auth.module.LinuxLoginModule";
+}
   } else {
-OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.UnixLoginModule";
+return windows ? "com.sun.security.auth.module.NTLoginModule"
+: "com.sun.security.auth.module.UnixLoginModule";
   }
 }
 
+static {
+  OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
+}
+
 private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
   new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
 
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
@@ -92,13 +110,22 @@ public class KerberosAuthenticator imple
 private static final Map<String, String> USER_KERBEROS_OPTIONS = new 
HashMap<String, String>();
 
 static {
-  USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
-  USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
-  USER_KERBEROS_OPTIONS.put("renewTGT", "true");
   String ticketCache = System.getenv("KRB5CCNAME");
+  if (IBM_JAVA) {
+USER_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
+  } else {
+USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
+  }
   if (ticketCache != null) {
-USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
+if (IBM_JAVA) {
+  // The first value searched when useDefaultCcache is used.
+  System.setProperty("KRB5CCNAME", ticketCache);
+} else {
+  USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
+}
   }
+  USER_KERBEROS_OPTIONS.put("renewTGT", "true");
 }
 
 private

svn commit: r1513688 - in /hadoop/common/branches/branch-2/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/main/java/org/apache/hadoo

2013-08-13 Thread llu
Author: llu
Date: Tue Aug 13 23:13:07 2013
New Revision: 1513688

URL: http://svn.apache.org/r1513688
Log:
HADOOP-9446. Support Kerberos SPNEGO for IBM JDK. (Yu Gao via llu)

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java
  - copied, changed from r1513528, 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
Removed:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java?rev=1513688r1=1513687r2=1513688view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 Tue Aug 13 23:13:07 2013
@@ -37,6 +37,8 @@ import java.security.PrivilegedException
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
+
 /**
  * The {@link KerberosAuthenticator} implements the Kerberos SPNEGO 
authentication sequence.
  * p/
@@ -75,15 +77,31 @@ public class KerberosAuthenticator imple
 
 private static final String OS_LOGIN_MODULE_NAME;
 private static final boolean windows = 
System.getProperty(os.name).startsWith(Windows);
+private static final boolean is64Bit = 
System.getProperty(os.arch).contains(64);
+private static final boolean aix = 
System.getProperty(os.name).equals(AIX);
 
-static {
-  if (windows) {
-OS_LOGIN_MODULE_NAME = com.sun.security.auth.module.NTLoginModule;
+/* Return the OS login module class name */
+private static String getOSLoginModuleName() {
+  if (IBM_JAVA) {
+if (windows) {
+  return is64Bit ? com.ibm.security.auth.module.Win64LoginModule
+  : com.ibm.security.auth.module.NTLoginModule;
+} else if (aix) {
+  return is64Bit ? com.ibm.security.auth.module.AIX64LoginModule
+  : com.ibm.security.auth.module.AIXLoginModule;
+} else {
+  return com.ibm.security.auth.module.LinuxLoginModule;
+}
   } else {
-OS_LOGIN_MODULE_NAME = com.sun.security.auth.module.UnixLoginModule;
+return windows ? com.sun.security.auth.module.NTLoginModule
+: com.sun.security.auth.module.UnixLoginModule;
   }
 }
 
+static {
+  OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
+}
+
 private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
   new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
 
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
@@ -92,13 +110,22 @@ public class KerberosAuthenticator imple
 private static final MapString, String USER_KERBEROS_OPTIONS = new 
HashMapString, String();
 
 static {
-  USER_KERBEROS_OPTIONS.put(doNotPrompt, true);
-  USER_KERBEROS_OPTIONS.put(useTicketCache, true);
-  USER_KERBEROS_OPTIONS.put(renewTGT, true);
   String ticketCache = System.getenv(KRB5CCNAME);
+  if (IBM_JAVA) {
+USER_KERBEROS_OPTIONS.put(useDefaultCcache, true);
+  } else {
+USER_KERBEROS_OPTIONS.put(doNotPrompt, true);
+USER_KERBEROS_OPTIONS.put(useTicketCache, true);
+  }
   if (ticketCache != null) {
-USER_KERBEROS_OPTIONS.put(ticketCache, ticketCache);
+if (IBM_JAVA) {
+  // The first value searched when useDefaultCcache is used.
+  System.setProperty(KRB5CCNAME, ticketCache);
+} else

svn commit: r1513689 - in /hadoop/common/branches/branch-2.1-beta/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/main/java/org/apach

2013-08-13 Thread llu
Author: llu
Date: Tue Aug 13 23:14:12 2013
New Revision: 1513689

URL: http://svn.apache.org/r1513689
Log:
HADOOP-9446. Support Kerberos SPNEGO for IBM JDK. (Yu Gao via llu)

Added:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java
  - copied, changed from r1513377, 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
Removed:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java?rev=1513689r1=1513688r2=1513689view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 Tue Aug 13 23:14:12 2013
@@ -37,6 +37,8 @@ import java.security.PrivilegedException
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
+
 /**
  * The {@link KerberosAuthenticator} implements the Kerberos SPNEGO 
authentication sequence.
  * p/
@@ -75,15 +77,31 @@ public class KerberosAuthenticator imple
 
 private static final String OS_LOGIN_MODULE_NAME;
 private static final boolean windows = 
System.getProperty(os.name).startsWith(Windows);
+private static final boolean is64Bit = 
System.getProperty(os.arch).contains(64);
+private static final boolean aix = 
System.getProperty(os.name).equals(AIX);
 
-static {
-  if (windows) {
-OS_LOGIN_MODULE_NAME = com.sun.security.auth.module.NTLoginModule;
+/* Return the OS login module class name */
+private static String getOSLoginModuleName() {
+  if (IBM_JAVA) {
+if (windows) {
+  return is64Bit ? com.ibm.security.auth.module.Win64LoginModule
+  : com.ibm.security.auth.module.NTLoginModule;
+} else if (aix) {
+  return is64Bit ? com.ibm.security.auth.module.AIX64LoginModule
+  : com.ibm.security.auth.module.AIXLoginModule;
+} else {
+  return com.ibm.security.auth.module.LinuxLoginModule;
+}
   } else {
-OS_LOGIN_MODULE_NAME = com.sun.security.auth.module.UnixLoginModule;
+return windows ? com.sun.security.auth.module.NTLoginModule
+: com.sun.security.auth.module.UnixLoginModule;
   }
 }
 
+static {
+  OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
+}
+
 private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
   new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
 
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
@@ -92,13 +110,22 @@ public class KerberosAuthenticator imple
 private static final MapString, String USER_KERBEROS_OPTIONS = new 
HashMapString, String();
 
 static {
-  USER_KERBEROS_OPTIONS.put(doNotPrompt, true);
-  USER_KERBEROS_OPTIONS.put(useTicketCache, true);
-  USER_KERBEROS_OPTIONS.put(renewTGT, true);
   String ticketCache = System.getenv(KRB5CCNAME);
+  if (IBM_JAVA) {
+USER_KERBEROS_OPTIONS.put(useDefaultCcache, true);
+  } else {
+USER_KERBEROS_OPTIONS.put(doNotPrompt, true);
+USER_KERBEROS_OPTIONS.put(useTicketCache, true);
+  }
   if (ticketCache != null) {
-USER_KERBEROS_OPTIONS.put(ticketCache, ticketCache);
+if (IBM_JAVA) {
+  // The first value searched when useDefaultCcache is used.
+  System.setProperty(KRB5CCNAME, ticketCache);
+} else

svn commit: r1510734 [2/2] - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/io/compress/ src/main/java/org/apa

2013-08-05 Thread llu
Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c?rev=1510734view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c
 Mon Aug  5 21:10:54 2013
@@ -0,0 +1,584 @@
+/*
+   LZ4 HC - High Compression Mode of LZ4
+   Copyright (C) 2011-2013, Yann Collet.
+   BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions are
+   met:
+
+   * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+   copyright notice, this list of conditions and the following disclaimer
+   in the documentation and/or other materials provided with the
+   distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+   You can contact the author at :
+   - LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
+   - LZ4 source repository : http://code.google.com/p/lz4/
+*/
+
+/*
+Note : this source file requires lz4hc_encoder.h
+*/
+
+
+//**
+// Memory routines
+//**
+#include <stdlib.h>   // calloc, free
+#define ALLOCATOR(s)  calloc(1,s)
+#define FREEMEM   free
+#include <string.h>   // memset, memcpy
+#define MEM_INIT  memset
+
+
+//**
+// CPU Feature Detection
+//**
+// 32 or 64 bits ?
+#if (defined(__x86_64__) || defined(_M_X64) || defined(_WIN64) \
+  || defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__) \
+  || defined(__64BIT__) || defined(_LP64) || defined(__LP64__) \
+  || defined(__ia64) || defined(__itanium__) || defined(_M_IA64) )   // 
Detects 64 bits mode
+#  define LZ4_ARCH64 1
+#else
+#  define LZ4_ARCH64 0
+#endif
+
+// Little Endian or Big Endian ?
+// Overwrite the #define below if you know your architecture endianess
+#if defined (__GLIBC__)
+#  include <endian.h>
+#  if (__BYTE_ORDER == __BIG_ENDIAN)
+# define LZ4_BIG_ENDIAN 1
+#  endif
+#elif (defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN) || 
defined(_BIG_ENDIAN)) && !(defined(__LITTLE_ENDIAN__) || 
defined(__LITTLE_ENDIAN) || defined(_LITTLE_ENDIAN))
+#  define LZ4_BIG_ENDIAN 1
+#elif defined(__sparc) || defined(__sparc__) \
+   || defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) \
+   || defined(__hpux)  || defined(__hppa) \
+   || defined(_MIPSEB) || defined(__s390__)
+#  define LZ4_BIG_ENDIAN 1
+#else
+// Little Endian assumed. PDP Endian and other very rare endian format are 
unsupported.
+#endif
+
+// Unaligned memory access is automatically enabled for common CPU, such as 
x86.
+// For others CPU, the compiler will be more cautious, and insert extra code 
to ensure aligned access is respected
+// If you know your target CPU supports unaligned memory access, you want to 
force this option manually to improve performance
+#if defined(__ARM_FEATURE_UNALIGNED)
+#  define LZ4_FORCE_UNALIGNED_ACCESS 1
+#endif
+
+// Define this parameter if your target system or compiler does not support 
hardware bit count
+#if defined(_MSC_VER) && defined(_WIN32_WCE)// Visual Studio for 
Windows CE does not support Hardware bit count
+#  define LZ4_FORCE_SW_BITCOUNT
+#endif
+
+
+//**
+// Compiler Options
+//**
+#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   // C99
+  /* restrict is a known keyword */
+#else
+#  define restrict  // Disable restrict
+#endif
+
+#ifdef _MSC_VER// Visual Studio
+#  define forceinline static __forceinline
+#  include 

svn commit: r1510735 [4/4] - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/ src/main/docs/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/io/comp

2013-08-05 Thread llu
Added: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c?rev=1510735view=auto
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c
 (added)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c
 Mon Aug  5 21:12:23 2013
@@ -0,0 +1,584 @@
+/*
+   LZ4 HC - High Compression Mode of LZ4
+   Copyright (C) 2011-2013, Yann Collet.
+   BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions are
+   met:
+
+   * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+   copyright notice, this list of conditions and the following disclaimer
+   in the documentation and/or other materials provided with the
+   distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+   You can contact the author at :
+   - LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
+   - LZ4 source repository : http://code.google.com/p/lz4/
+*/
+
+/*
+Note : this source file requires lz4hc_encoder.h
+*/
+
+
+//**
+// Memory routines
+//**
+#include <stdlib.h>   // calloc, free
+#define ALLOCATOR(s)  calloc(1,s)
+#define FREEMEM   free
+#include <string.h>   // memset, memcpy
+#define MEM_INIT  memset
+
+
+//**
+// CPU Feature Detection
+//**
+// 32 or 64 bits ?
+#if (defined(__x86_64__) || defined(_M_X64) || defined(_WIN64) \
+  || defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__) \
+  || defined(__64BIT__) || defined(_LP64) || defined(__LP64__) \
+  || defined(__ia64) || defined(__itanium__) || defined(_M_IA64) )   // 
Detects 64 bits mode
+#  define LZ4_ARCH64 1
+#else
+#  define LZ4_ARCH64 0
+#endif
+
+// Little Endian or Big Endian ?
+// Overwrite the #define below if you know your architecture endianess
+#if defined (__GLIBC__)
+#  include <endian.h>
+#  if (__BYTE_ORDER == __BIG_ENDIAN)
+# define LZ4_BIG_ENDIAN 1
+#  endif
+#elif (defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN) || 
defined(_BIG_ENDIAN)) && !(defined(__LITTLE_ENDIAN__) || 
defined(__LITTLE_ENDIAN) || defined(_LITTLE_ENDIAN))
+#  define LZ4_BIG_ENDIAN 1
+#elif defined(__sparc) || defined(__sparc__) \
+   || defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) \
+   || defined(__hpux)  || defined(__hppa) \
+   || defined(_MIPSEB) || defined(__s390__)
+#  define LZ4_BIG_ENDIAN 1
+#else
+// Little Endian assumed. PDP Endian and other very rare endian format are 
unsupported.
+#endif
+
+// Unaligned memory access is automatically enabled for common CPU, such as 
x86.
+// For others CPU, the compiler will be more cautious, and insert extra code 
to ensure aligned access is respected
+// If you know your target CPU supports unaligned memory access, you want to 
force this option manually to improve performance
+#if defined(__ARM_FEATURE_UNALIGNED)
+#  define LZ4_FORCE_UNALIGNED_ACCESS 1
+#endif
+
+// Define this parameter if your target system or compiler does not support 
hardware bit count
+#if defined(_MSC_VER) && defined(_WIN32_WCE)// Visual Studio for 
Windows CE does not support Hardware bit count
+#  define LZ4_FORCE_SW_BITCOUNT
+#endif
+
+
+//**
+// Compiler Options
+//**
+#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   // C99
+  /* restrict is a known keyword */
+#else
+#  define restrict  // Disable restrict
+#endif
+
+#ifdef _MSC_VER// Visual Studio
+#  define 

svn commit: r1510735 [1/4] - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/ src/main/docs/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/io/comp

2013-08-05 Thread llu
Author: llu
Date: Mon Aug  5 21:12:23 2013
New Revision: 1510735

URL: http://svn.apache.org/r1510735
Log:
HADOOP-9319. Update bundled LZ4 source to r99. (Binglin Chang via llu)

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.h

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4_encoder.h

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.h

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc_encoder.h
Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/LICENSE.txt
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/CMakeLists.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1510735r1=1510734r2=1510735view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Mon Aug  5 21:12:23 2013
@@ -8,6 +8,8 @@ Release 2.3.0 - UNRELEASED
 
   IMPROVEMENTS
 
+HADOOP-9319. Update bundled LZ4 source to r99. (Binglin Chang via llu)
+
 HADOOP-9432 Add support for markdown .md files in site documentation 
(stevel)
 
 HADOOP-9241. DU refresh interval is not configurable (harsh)

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/LICENSE.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/LICENSE.txt?rev=1510735r1=1510734r2=1510735view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/LICENSE.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/LICENSE.txt 
Mon Aug  5 21:12:23 2013
@@ -252,24 +252,26 @@ in src/main/native/src/org/apache/hadoop
  *   BSD-style license that can be found in the LICENSE file.
  */
 
- For src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c:
+For src/main/native/src/org/apache/hadoop/io/compress/lz4/{lz4.h,lz4.c,
+lz4_encoder.h,lz4hc.h,lz4hc.c,lz4hc_encoder.h},
 
 /*
LZ4 - Fast LZ compression algorithm
-   Copyright (C) 2011, Yann Collet.
-   BSD License
+   Header File
+   Copyright (C) 2011-2013, Yann Collet.
+   BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
 
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
-  
+
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
-  
+
THIS SOFTWARE IS PROVIDED

svn commit: r1504700 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/io/compress/ src/main/java/org/apache/hadoop/io/compress/bzip2/ src/main/java/org

2013-07-18 Thread llu
Author: llu
Date: Thu Jul 18 22:38:36 2013
New Revision: 1504700

URL: http://svn.apache.org/r1504700
Log:
HADOOP-9164. Print paths of loaded native libraries in NativeLibraryChecker. 
(Binglin Chang via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.c

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1504700&r1=1504699&r2=1504700&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Jul 
18 22:38:36 2013
@@ -378,6 +378,9 @@ Release 2.1.0-beta - 2013-07-02
 
   IMPROVEMENTS
 
+HADOOP-9164. Print paths of loaded native libraries in
+NativeLibraryChecker. (Binglin Chang via llu)
+
 HADOOP-9253. Capture ulimit info in the logs at service start time.
 (Arpit Gupta via suresh)
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java?rev=1504700&r1=1504699&r2=1504700&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
 Thu Jul 18 22:38:36 2013
@@ -69,6 +69,10 @@ public class Lz4Codec implements Configu
 return NativeCodeLoader.isNativeCodeLoaded();
   }
 
+  public static String getLibraryName() {
+return Lz4Compressor.getLibraryName();
+  }
+
   /**
* Create a {@link CompressionOutputStream} that will write to the given
* {@link OutputStream}.

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java?rev=1504700&r1=1504699&r2=1504700&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
 Thu Jul 18 22:38:36 2013
@@ -79,6 +79,10 @@ public class SnappyCodec implements Conf
 SnappyDecompressor.isNativeCodeLoaded();
   }
 
+  public static String getLibraryName() {
+return SnappyCompressor.getLibraryName

svn commit: r1504711 - in /hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/io/compress/ src/main/java/org/apache/hadoop/io/compress/bzip2

2013-07-18 Thread llu
Author: llu
Date: Thu Jul 18 22:52:28 2013
New Revision: 1504711

URL: http://svn.apache.org/r1504711
Log:
HADOOP-9164. Print paths of loaded native libraries in NativeLibraryChecker. 
(Binglin Chang via llu)

Modified:

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.c

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c

hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1504711&r1=1504710&r2=1504711&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Jul 18 22:52:28 2013
@@ -63,6 +63,9 @@ Release 2.1.0-beta - 2013-07-02
 
   IMPROVEMENTS
 
+HADOOP-9164. Print paths of loaded native libraries in
+NativeLibraryChecker. (Binglin Chang via llu)
+
 HADOOP-9253. Capture ulimit info in the logs at service start time.
 (Arpit Gupta via suresh)
 

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java?rev=1504711&r1=1504710&r2=1504711&view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
 Thu Jul 18 22:52:28 2013
@@ -69,6 +69,10 @@ public class Lz4Codec implements Configu
 return NativeCodeLoader.isNativeCodeLoaded();
   }
 
+  public static String getLibraryName() {
+return Lz4Compressor.getLibraryName();
+  }
+
   /**
* Create a {@link CompressionOutputStream} that will write to the given
* {@link OutputStream}.

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java?rev=1504711&r1=1504710&r2=1504711&view=diff

svn commit: r1504713 - in /hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/io/compress/ src/main/java/org/apache/hadoop/io/compress/bzi

2013-07-18 Thread llu
Author: llu
Date: Thu Jul 18 22:57:22 2013
New Revision: 1504713

URL: http://svn.apache.org/r1504713
Log:
HADOOP-9164. Print paths of loaded native libraries in NativeLibraryChecker. 
(Binglin Chang via llu)

Modified:

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.c

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c

hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1504713&r1=1504712&r2=1504713&view=diff
==
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Jul 18 22:57:22 2013
@@ -61,6 +61,9 @@ Release 2.1.0-beta - 2013-07-02
 
   IMPROVEMENTS
 
+HADOOP-9164. Print paths of loaded native libraries in
+NativeLibraryChecker. (Binglin Chang via llu)
+
 HADOOP-9253. Capture ulimit info in the logs at service start time.
 (Arpit Gupta via suresh)
 

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java?rev=1504713&r1=1504712&r2=1504713&view=diff
==
--- 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
 (original)
+++ 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
 Thu Jul 18 22:57:22 2013
@@ -69,6 +69,10 @@ public class Lz4Codec implements Configu
 return NativeCodeLoader.isNativeCodeLoaded();
   }
 
+  public static String getLibraryName() {
+return Lz4Compressor.getLibraryName();
+  }
+
   /**
* Create a {@link CompressionOutputStream} that will write to the given
* {@link OutputStream}.

Modified: 
hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1.0-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java?rev=1504713&r1=1504712&r2=1504713&view=diff

svn commit: r1498011 [1/2] - /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html

2013-06-29 Thread llu
Author: llu
Date: Sat Jun 29 19:26:45 2013
New Revision: 1498011

URL: http://svn.apache.org/r1498011
Log:
LF normalization according to .gitattributes

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html



svn commit: r1498023 [1/2] - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html

2013-06-29 Thread llu
Author: llu
Date: Sat Jun 29 20:20:59 2013
New Revision: 1498023

URL: http://svn.apache.org/r1498023
Log:
LF normalization according to HADOOP-8912

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html



svn commit: r1491683 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/main/java/org/apache/hadoop/ipc/Server.j

2013-06-10 Thread llu
Author: llu
Date: Tue Jun 11 03:31:10 2013
New Revision: 1491683

URL: http://svn.apache.org/r1491683
Log:
HADOOP-9630. [RPC v9] Remove IpcSerializationType. (Junping Du via llu)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1491683&r1=1491682&r2=1491683&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Jun 11 03:31:10 2013
@@ -30,17 +30,19 @@ Release 2.1.0-beta - UNRELEASED
 
 HADOOP-8886. Remove KFS support. (eli)
 
-HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+HADOOP-9163. [RPC v9] The rpc msg in ProtobufRpcEngine.proto should be 
moved out to
 avoid an extra copy (Sanjay Radia)
 
-HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
+HADOOP-9151. [RPC v9] Include RPC error info in RpcResponseHeader instead 
of sending
 it separately (sanjay Radia)
 
-HADOOP-9380 Add totalLength to rpc response  (sanjay Radia)
+HADOOP-9380. [RPC v9] Add totalLength to rpc response  (sanjay Radia)
 
-HADOOP-9425 Add error codes to rpc-response (sanjay Radia)
+HADOOP-9425. [RPC v9] Add error codes to rpc-response (sanjay Radia)
 
-HADOOP-9194. RPC support for QoS. (Junping Du via llu)
+HADOOP-9194. [RPC v9] RPC support for QoS. (Junping Du via llu)
+
+HADOOP-9630. [RPC v9] Remove IpcSerializationType. (Junping Du via llu)
 
   NEW FEATURES
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1491683&r1=1491682&r2=1491683&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Tue Jun 11 03:31:10 2013
@@ -748,8 +748,6 @@ public class Client {
  * +--+
  * |  Authmethod (1 byte) |  
  * +--+
- * |  IpcSerializationType (1 byte)   |  
- * +--+
  */
 private void writeConnectionHeader(OutputStream outStream)
 throws IOException {
@@ -759,7 +757,6 @@ public class Client {
   out.write(Server.CURRENT_VERSION);
   out.write(serviceClass);
   authMethod.write(out);
-  Server.IpcSerializationType.PROTOBUF.write(out);
   out.flush();
 }
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1491683&r1=1491682&r2=1491683&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Tue Jun 11 03:31:10 2013
@@ -162,22 +162,6 @@ public abstract class Server {
  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes());
   
   /**
-   * Serialization type for ConnectionContext and RpcRequestHeader
-   */
-  public enum IpcSerializationType {
-// Add new serialization type to the end without affecting the enum order
-PROTOBUF;
-
-void write(DataOutput out) throws IOException {
-  out.writeByte(this.ordinal());
-}
-
-static IpcSerializationType fromByte(byte b) {
-  return IpcSerializationType.values()[b];
-}
-  }
-  
-  /**
* If the user accidentally sends an HTTP GET to an IPC port, we detect this
* and send back a nicer response.
*/
@@ -1319,7 +1303,7 @@ public abstract class Server {
 if (!connectionHeaderRead) {
   //Every connection is expected to send the header.
   if (connectionHeaderBuf == null) {
-connectionHeaderBuf

svn commit: r1491682 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/main/java/org/apache/hadoop/ipc/Server.java

2013-06-10 Thread llu
Author: llu
Date: Tue Jun 11 03:30:41 2013
New Revision: 1491682

URL: http://svn.apache.org/r1491682
Log:
HADOOP-9630. [RPC v9] Remove IpcSerializationType. (Junping Du via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1491682&r1=1491681&r2=1491682&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Jun 
11 03:30:41 2013
@@ -358,17 +358,19 @@ Release 2.1.0-beta - UNRELEASED
 
 HADOOP-8886. Remove KFS support. (eli)
 
-HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+HADOOP-9163. [RPC v9] The rpc msg in ProtobufRpcEngine.proto should be 
moved out to
 avoid an extra copy (Sanjay Radia)
 
-HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
+HADOOP-9151. [RPC v9] Include RPC error info in RpcResponseHeader instead 
of sending
 it separately (sanjay Radia)
 
-HADOOP-9380 Add totalLength to rpc response  (sanjay Radia)
+HADOOP-9380. [RPC v9] Add totalLength to rpc response  (sanjay Radia)
 
-HADOOP-9425 Add error codes to rpc-response (sanjay Radia)
+HADOOP-9425. [RPC v9] Add error codes to rpc-response (sanjay Radia)
 
-HADOOP-9194. RPC support for QoS. (Junping Du via llu)
+HADOOP-9194. [RPC v9] RPC support for QoS. (Junping Du via llu)
+
+HADOOP-9630. [RPC v9] Remove IpcSerializationType. (Junping Du via llu)
 
   NEW FEATURES
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1491682&r1=1491681&r2=1491682&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Tue Jun 11 03:30:41 2013
@@ -750,8 +750,6 @@ public class Client {
  * +--+
  * |  Authmethod (1 byte) |  
  * +--+
- * |  IpcSerializationType (1 byte)   |  
- * +--+
  */
 private void writeConnectionHeader(OutputStream outStream)
 throws IOException {
@@ -761,7 +759,6 @@ public class Client {
   out.write(Server.CURRENT_VERSION);
   out.write(serviceClass);
   authMethod.write(out);
-  Server.IpcSerializationType.PROTOBUF.write(out);
   out.flush();
 }
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1491682&r1=1491681&r2=1491682&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Tue Jun 11 03:30:41 2013
@@ -165,22 +165,6 @@ public abstract class Server {
  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes());
   
   /**
-   * Serialization type for ConnectionContext and RpcRequestHeader
-   */
-  public enum IpcSerializationType {
-// Add new serialization type to the end without affecting the enum order
-PROTOBUF;
-
-void write(DataOutput out) throws IOException {
-  out.writeByte(this.ordinal());
-}
-
-static IpcSerializationType fromByte(byte b) {
-  return IpcSerializationType.values()[b];
-}
-  }
-  
-  /**
* If the user accidentally sends an HTTP GET to an IPC port, we detect this
* and send back a nicer response.
*/
@@ -1322,7 +1306,7 @@ public abstract class Server {
 if (!connectionHeaderRead) {
   //Every connection is expected to send the header.
   if (connectionHeaderBuf == null) {
-connectionHeaderBuf = ByteBuffer.allocate(4);
+connectionHeaderBuf = ByteBuffer.allocate(3);
   }
   count = channelRead(channel, connectionHeaderBuf);
   if (count  0

svn commit: r1489951 - in /hadoop/common/branches/branch-2: dev-support/ hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/ hadoop-tools/hadoop-rumen/src/main/java/org/apache/had

2013-06-05 Thread llu
Author: llu
Date: Wed Jun  5 16:49:04 2013
New Revision: 1489951

URL: http://svn.apache.org/r1489951
Log:
HADOOP-9616. Fix branch-2 javadoc warnings. (Junping Du via llu)

Modified:
hadoop/common/branches/branch-2/dev-support/test-patch.properties

hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java

Modified: hadoop/common/branches/branch-2/dev-support/test-patch.properties
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/dev-support/test-patch.properties?rev=1489951&r1=1489950&r2=1489951&view=diff
==
--- hadoop/common/branches/branch-2/dev-support/test-patch.properties (original)
+++ hadoop/common/branches/branch-2/dev-support/test-patch.properties Wed Jun  
5 16:49:04 2013
@@ -18,4 +18,4 @@
 
 OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
-OK_JAVADOC_WARNINGS=13
+OK_JAVADOC_WARNINGS=16

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java?rev=1489951&r1=1489950&r2=1489951&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
 Wed Jun  5 16:49:04 2013
@@ -17,7 +17,6 @@
  */
 
 /**
- * Command-line tools associated with the {@link org.apache.hadoop.mapred}
- * package.
+ * Command-line tools associated with the org.apache.hadoop.mapred package.
  */
 package org.apache.hadoop.mapred.tools;

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java?rev=1489951&r1=1489950&r2=1489951&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
 Wed Jun  5 16:49:04 2013
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapreduce.v2.hs
 
 /**
  * {@link JobHistoryParser} that parses {@link JobHistory} files produced by
- * {@link org.apache.hadoop.mapreduce.jobhistory.JobHistory} in the same source
+ * {@link org.apache.hadoop.mapreduce.v2.hs.JobHistory} in the same source
  * code tree as rumen.
  */
 public class CurrentJHParser implements JobHistoryParser {

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java?rev=1489951&r1=1489950&r2=1489951&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
 Wed Jun  5 16:49:04 2013
@@ -170,7 +170,7 @@ public class LoggedTaskAttempt implement
 
   /**
*
-   * @returns a list of all splits vectors, ordered in enumeral order
+   * @return a list of all splits vectors, ordered in enumeral order
*   within {@link SplitVectorKind} .  Do NOT use hard-coded
*   indices within the return for this with a hard-coded
*   index to get individual values; use

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util

svn commit: r1489952 - in /hadoop/common/branches/branch-2.1-beta: dev-support/ hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/ hadoop-tools/hadoop-rumen/src/main/java/org/apa

2013-06-05 Thread llu
Author: llu
Date: Wed Jun  5 16:50:35 2013
New Revision: 1489952

URL: http://svn.apache.org/r1489952
Log:
HADOOP-9616. Fix branch-2 javadoc warnings. (Junping Du via llu)

Modified:
hadoop/common/branches/branch-2.1-beta/dev-support/test-patch.properties

hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java

hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java

hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java

hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java

hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java

Modified: 
hadoop/common/branches/branch-2.1-beta/dev-support/test-patch.properties
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/dev-support/test-patch.properties?rev=1489952&r1=1489951&r2=1489952&view=diff
==
--- hadoop/common/branches/branch-2.1-beta/dev-support/test-patch.properties 
(original)
+++ hadoop/common/branches/branch-2.1-beta/dev-support/test-patch.properties 
Wed Jun  5 16:50:35 2013
@@ -18,4 +18,4 @@
 
 OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
-OK_JAVADOC_WARNINGS=13
+OK_JAVADOC_WARNINGS=16

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java?rev=1489952r1=1489951r2=1489952view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
 Wed Jun  5 16:50:35 2013
@@ -17,7 +17,6 @@
  */
 
 /**
- * Command-line tools associated with the {@link org.apache.hadoop.mapred}
- * package.
+ * Command-line tools associated with the org.apache.hadoop.mapred package.
  */
 package org.apache.hadoop.mapred.tools;

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java?rev=1489952r1=1489951r2=1489952view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
 Wed Jun  5 16:50:35 2013
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapreduce.v2.hs
 
 /**
  * {@link JobHistoryParser} that parses {@link JobHistory} files produced by
- * {@link org.apache.hadoop.mapreduce.jobhistory.JobHistory} in the same source
+ * {@link org.apache.hadoop.mapreduce.v2.hs.JobHistory} in the same source
  * code tree as rumen.
  */
 public class CurrentJHParser implements JobHistoryParser {

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java?rev=1489952r1=1489951r2=1489952view=diff
==
--- 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
 (original)
+++ 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
 Wed Jun  5 16:50:35 2013
@@ -170,7 +170,7 @@ public class LoggedTaskAttempt implement
 
   /**
*
-   * @returns a list of all splits vectors, ordered in enumeral order
+   * @return a list of all splits vectors, ordered in enumeral order
*   within {@link SplitVectorKind} .  Do NOT use hard-coded
*   indices within the return for this with a hard-coded
*   index to get individual values; use

Modified: 
hadoop/common/branches/branch-2.1-beta/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util

svn commit: r1477388 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-04-29 Thread llu
Author: llu
Date: Mon Apr 29 22:33:39 2013
New Revision: 1477388

URL: http://svn.apache.org/r1477388
Log:
HADOOP-9194. RPC Support for QoS. (Junping Du via llu)

Merged from trunk@1461370.

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1477388r1=1477387r2=1477388view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Mon Apr 29 22:33:39 2013
@@ -6,6 +6,8 @@ Release 2.0.5-beta - UNRELEASED
 
   NEW FEATURES
 
+HADOOP-9194. RPC support for QoS. (Junping Du via llu)
+
 HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)
 
 HADOOP-8415. Add getDouble() and setDouble() in 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1477388r1=1477387r2=1477388view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Apr 29 22:33:39 2013
@@ -256,6 +256,7 @@ public class Client {
 private final ConnectionId remoteId;// connection id
 private AuthMethod authMethod; // authentication method
private Token<? extends TokenIdentifier> token;
+private int serviceClass;
 private SaslRpcClient saslRpcClient;
 
 private Socket socket = null; // connected socket
@@ -278,7 +279,7 @@ public class Client {
 
 private final Object sendParamsLock = new Object();
 
-public Connection(ConnectionId remoteId) throws IOException {
+public Connection(ConnectionId remoteId, int serviceClass) throws 
IOException {
   this.remoteId = remoteId;
   this.server = remoteId.getAddress();
   if (server.isUnresolved()) {
@@ -295,6 +296,7 @@ public class Client {
   this.tcpNoDelay = remoteId.getTcpNoDelay();
   this.doPing = remoteId.getDoPing();
   this.pingInterval = remoteId.getPingInterval();
+  this.serviceClass = serviceClass;
   if (LOG.isDebugEnabled()) {
 LOG.debug("The ping interval is " + this.pingInterval + " ms.");
   }
@@ -740,7 +742,9 @@ public class Client {
  * +--+
  * |  hrpc 4 bytes  |  
  * +--+
- * |  Version (1 bytes)   |  
+ * |  Version (1 byte)|
+ * +--+
+ * |  Service Class (1 byte)  |
  * +--+
  * |  Authmethod (1 byte) |  
  * +--+
@@ -753,6 +757,7 @@ public class Client {
   // Write out the header, version and authentication method
   out.write(Server.HEADER.array());
   out.write(Server.CURRENT_VERSION);
+  out.write(serviceClass);
   authMethod.write(out);
   Server.IpcSerializationType.PROTOBUF.write(out);
   out.flush();
@@ -1144,20 +1149,34 @@ public class Client {
 
   
   /**
-   * Same as {@link #call(RPC.RpcKind, Writable, InetSocketAddress, 
+   * Same as {@link #call(RPC.RpcKind, Writable, InetSocketAddress,
* Class, UserGroupInformation, int, Configuration)}
* except that rpcKind is writable.
*/
-  public Writable call(Writable param, InetSocketAddress addr, 
+  public Writable call(Writable param, InetSocketAddress addr,
   Class<?> protocol, UserGroupInformation ticket,
-  int rpcTimeout, Configuration conf)  
+  int rpcTimeout, Configuration conf)
   throws InterruptedException, IOException {
-ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
+ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
 ticket, rpcTimeout, conf);
 return

svn commit: r1477390 [1/2] - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html

2013-04-29 Thread llu
Author: llu
Date: Mon Apr 29 22:36:53 2013
New Revision: 1477390

URL: http://svn.apache.org/r1477390
Log:
Fix CRLF in html according to HADOOP-8912.

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html



svn commit: r1468731 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-04-16 Thread llu
Author: llu
Date: Wed Apr 17 02:57:29 2013
New Revision: 1468731

URL: http://svn.apache.org/r1468731
Log:
Fix HADOOP-9467 attribution.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1468731r1=1468730r2=1468731view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Apr 
17 02:57:29 2013
@@ -641,7 +641,7 @@ Release 2.0.4-alpha - UNRELEASED
   BUG FIXES
 
 HADOOP-9467. Metrics2 record filter should check name as well as tags.
-(Ganeshan Iyler via llu)
+(Chris Nauroth and Ganeshan Iyler via llu)
 
 HADOOP-9406. hadoop-client leaks dependency on JDK tools jar. (tucu)
 




svn commit: r1468732 - /hadoop/common/branches/branch-1/CHANGES.txt

2013-04-16 Thread llu
Author: llu
Date: Wed Apr 17 03:00:31 2013
New Revision: 1468732

URL: http://svn.apache.org/r1468732
Log:
Fix HADOOP-9467 attribution.

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1468732r1=1468731r2=1468732view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Wed Apr 17 03:00:31 2013
@@ -242,7 +242,7 @@ Release 1.2.0 - unreleased
   BUG FIXES
 
 HADOOP-9467. Metrics2 record filter should check name as well as tags.
-(Ganeshan Iyler via llu)
+(Chris Nauroth and Ganeshan Iyler via llu)
 
 HADOOP-9451. Fault single-layer config if node group topology is enabled.
 (Junping Du via llu)




svn commit: r1466377 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java src/test/java/org/apache/hadoop/metrics2/fil

2013-04-10 Thread llu
Author: llu
Date: Wed Apr 10 08:25:37 2013
New Revision: 1466377

URL: http://svn.apache.org/r1466377
Log:
HADOOP-9467. Metrics2 record filter should check name as well as tags. 
(Ganeshan Iyler via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1466377r1=1466376r2=1466377view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Apr 
10 08:25:37 2013
@@ -629,6 +629,9 @@ Release 2.0.4-alpha - UNRELEASED
 
   BUG FIXES
 
+HADOOP-9467. Metrics2 record filter should check name as well as tags.
+(Ganeshan Iyler via llu)
+
 HADOOP-9406. hadoop-client leaks dependency on JDK tools jar. (tucu)
 
 HADOOP-9301. hadoop client servlet/jsp/jetty/tomcat JARs creating

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java?rev=1466377r1=1466376r2=1466377view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
 Wed Apr 10 08:25:37 2013
@@ -57,7 +57,7 @@ public abstract class MetricsFilter impl
* @return  true to accept; false otherwise.
*/
   public boolean accepts(MetricsRecord record) {
-return accepts(record.tags());
+return accepts(record.name()) && accepts(record.tags());
   }
 
 }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1466377r1=1466376r2=1466377view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 Wed Apr 10 08:25:37 2013
@@ -24,7 +24,9 @@ import java.util.List;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
 import static org.apache.hadoop.metrics2.lib.Interns.*;
@@ -38,6 +40,8 @@ public class TestPatternFilter {
 SubsetConfiguration empty = new ConfigBuilder().subset();
 shouldAccept(empty, anything);
 shouldAccept(empty, Arrays.asList(tag(key, desc, value)));
+shouldAccept(empty, mockMetricsRecord(anything, Arrays.asList(
+  tag(key, desc, value;
   }
 
   /**
@@ -50,9 +54,15 @@ public class TestPatternFilter {
 shouldAccept(wl, foo);
 shouldAccept(wl, Arrays.asList(tag(bar, , ),
tag(foo, , f)));
+shouldAccept(wl, mockMetricsRecord(foo, Arrays.asList(
+  tag(bar, , ), tag(foo, , f;
 shouldReject(wl, bar);
 shouldReject(wl, Arrays.asList(tag(bar, , )));
 shouldReject(wl, Arrays.asList(tag(foo, , boo)));
+shouldReject(wl, mockMetricsRecord(bar, Arrays.asList(
+  tag(foo, , f;
+shouldReject(wl, mockMetricsRecord(foo, Arrays.asList(
+  tag(bar, , ;
   }
 
   /**
@@ -64,9 +74,15 @@ public class TestPatternFilter {
 .add(p.exclude.tags, foo:f).subset(p);
 shouldAccept(bl, bar);
 shouldAccept(bl, Arrays.asList(tag(bar, , )));
+shouldAccept(bl, mockMetricsRecord(bar, Arrays.asList(
+  tag(bar, , ;
 shouldReject(bl, foo);
 shouldReject(bl, Arrays.asList(tag(bar, , ),
tag(foo, , f)));
+shouldReject(bl, mockMetricsRecord(foo, Arrays.asList(
+  tag(bar, , ;
+shouldReject(bl, mockMetricsRecord(bar, Arrays.asList(
+  tag(bar, , ), tag(foo, , f;
   }
 
   /**
@@ -81,10 +97,18 @@ public class

svn commit: r1466381 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java src/test/java/org/apache/hadoop/

2013-04-10 Thread llu
Author: llu
Date: Wed Apr 10 08:34:29 2013
New Revision: 1466381

URL: http://svn.apache.org/r1466381
Log:
HADOOP-9467. Metrics2 record filter should check name as well as tags. 
(Ganeshan Iyler via llu)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1466381r1=1466380r2=1466381view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Apr 10 08:34:29 2013
@@ -122,6 +122,9 @@ Release 2.0.4-alpha - UNRELEASED
 
   BUG FIXES
 
+HADOOP-9467. Metrics2 record filter should check name as well as tags.
+(Ganeshan Iyler via llu)
+
 HADOOP-9406. hadoop-client leaks dependency on JDK tools jar. (tucu)
 
 HADOOP-9301. hadoop client servlet/jsp/jetty/tomcat JARs creating

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java?rev=1466381r1=1466380r2=1466381view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
 Wed Apr 10 08:34:29 2013
@@ -55,7 +55,7 @@ public abstract class MetricsFilter impl
* @return  true to accept; false otherwise.
*/
   public boolean accepts(MetricsRecord record) {
-return accepts(record.tags());
+return accepts(record.name()) && accepts(record.tags());
   }
 
 }

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1466381r1=1466380r2=1466381view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 Wed Apr 10 08:34:29 2013
@@ -24,7 +24,9 @@ import java.util.List;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
 import static org.apache.hadoop.metrics2.lib.Interns.*;
@@ -38,6 +40,8 @@ public class TestPatternFilter {
 SubsetConfiguration empty = new ConfigBuilder().subset();
 shouldAccept(empty, anything);
 shouldAccept(empty, Arrays.asList(tag(key, desc, value)));
+shouldAccept(empty, mockMetricsRecord(anything, Arrays.asList(
+  tag(key, desc, value;
   }
 
   /**
@@ -50,9 +54,15 @@ public class TestPatternFilter {
 shouldAccept(wl, foo);
 shouldAccept(wl, Arrays.asList(tag(bar, , ),
tag(foo, , f)));
+shouldAccept(wl, mockMetricsRecord(foo, Arrays.asList(
+  tag(bar, , ), tag(foo, , f;
 shouldReject(wl, bar);
 shouldReject(wl, Arrays.asList(tag(bar, , )));
 shouldReject(wl, Arrays.asList(tag(foo, , boo)));
+shouldReject(wl, mockMetricsRecord(bar, Arrays.asList(
+  tag(foo, , f;
+shouldReject(wl, mockMetricsRecord(foo, Arrays.asList(
+  tag(bar, , ;
   }
 
   /**
@@ -64,9 +74,15 @@ public class TestPatternFilter {
 .add(p.exclude.tags, foo:f).subset(p);
 shouldAccept(bl, bar);
 shouldAccept(bl, Arrays.asList(tag(bar, , )));
+shouldAccept(bl, mockMetricsRecord(bar, Arrays.asList(
+  tag(bar, , ;
 shouldReject(bl, foo);
 shouldReject(bl, Arrays.asList(tag(bar, , ),
tag(foo, , f)));
+shouldReject(bl, mockMetricsRecord(foo, Arrays.asList

svn commit: r1466384 - in /hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java src/test/java/org/apac

2013-04-10 Thread llu
Author: llu
Date: Wed Apr 10 08:37:32 2013
New Revision: 1466384

URL: http://svn.apache.org/r1466384
Log:
HADOOP-9467. Metrics2 record filter should check name as well as tags. 
(Ganeshan Iyler via llu)

Modified:

hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java

hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

Modified: 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1466384r1=1466383r2=1466384view=diff
==
--- 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/CHANGES.txt
 Wed Apr 10 08:37:32 2013
@@ -83,6 +83,9 @@ Release 2.0.4-alpha - UNRELEASED
 
   BUG FIXES
 
+HADOOP-9467. Metrics2 record filter should check name as well as tags.
+(Ganeshan Iyler via llu)
+
 HADOOP-9406. hadoop-client leaks dependency on JDK tools jar. (tucu)
 
 HADOOP-9301. hadoop client servlet/jsp/jetty/tomcat JARs creating 
conflicts in Oozie & HttpFS. (tucu)

Modified: 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java?rev=1466384r1=1466383r2=1466384view=diff
==
--- 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
 (original)
+++ 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
 Wed Apr 10 08:37:32 2013
@@ -55,7 +55,7 @@ public abstract class MetricsFilter impl
* @return  true to accept; false otherwise.
*/
   public boolean accepts(MetricsRecord record) {
-return accepts(record.tags());
+return accepts(record.name()) && accepts(record.tags());
   }
 
 }

Modified: 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1466384r1=1466383r2=1466384view=diff
==
--- 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 (original)
+++ 
hadoop/common/branches/branch-2.0.4-alpha/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 Wed Apr 10 08:37:32 2013
@@ -24,7 +24,9 @@ import java.util.List;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
 import static org.apache.hadoop.metrics2.lib.Interns.*;
@@ -38,6 +40,8 @@ public class TestPatternFilter {
 SubsetConfiguration empty = new ConfigBuilder().subset();
 shouldAccept(empty, anything);
 shouldAccept(empty, Arrays.asList(tag(key, desc, value)));
+shouldAccept(empty, mockMetricsRecord(anything, Arrays.asList(
+  tag(key, desc, value;
   }
 
   /**
@@ -50,9 +54,15 @@ public class TestPatternFilter {
 shouldAccept(wl, foo);
 shouldAccept(wl, Arrays.asList(tag(bar, , ),
tag(foo, , f)));
+shouldAccept(wl, mockMetricsRecord(foo, Arrays.asList(
+  tag(bar, , ), tag(foo, , f;
 shouldReject(wl, bar);
 shouldReject(wl, Arrays.asList(tag(bar, , )));
 shouldReject(wl, Arrays.asList(tag(foo, , boo)));
+shouldReject(wl, mockMetricsRecord(bar, Arrays.asList(
+  tag(foo, , f;
+shouldReject(wl, mockMetricsRecord(foo, Arrays.asList(
+  tag(bar, , ;
   }
 
   /**
@@ -64,9 +74,15 @@ public class TestPatternFilter {
 .add(p.exclude.tags, foo:f).subset(p);
 shouldAccept(bl, bar);
 shouldAccept(bl, Arrays.asList(tag(bar, , )));
+shouldAccept(bl, mockMetricsRecord(bar, Arrays.asList(
+  tag(bar

svn commit: r1466385 - in /hadoop/common/branches/branch-1: CHANGES.txt src/core/org/apache/hadoop/metrics2/MetricsFilter.java src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

2013-04-10 Thread llu
Author: llu
Date: Wed Apr 10 08:39:52 2013
New Revision: 1466385

URL: http://svn.apache.org/r1466385
Log:
HADOOP-9467. Metrics2 record filter should check name as well as tags. 
(Ganeshan Iyler via llu)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1466385r1=1466384r2=1466385view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Wed Apr 10 08:39:52 2013
@@ -238,6 +238,9 @@ Release 1.2.0 - unreleased
 
   BUG FIXES
 
+HADOOP-9467. Metrics2 record filter should check name as well as tags.
+(Ganeshan Iyler via llu)
+
 HADOOP-9451. Fault single-layer config if node group topology is enabled.
 (Junping Du via llu)
 

Modified: 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java?rev=1466385r1=1466384r2=1466385view=diff
==
--- 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 Wed Apr 10 08:39:52 2013
@@ -54,7 +54,7 @@ public abstract class MetricsFilter impl
* @return  true to accept; false otherwise.
*/
   public boolean accepts(MetricsRecord record) {
-return accepts(record.tags());
+return accepts(record.name()) && accepts(record.tags());
   }
 
 }

Modified: 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1466385r1=1466384r2=1466385view=diff
==
--- 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 Wed Apr 10 08:39:52 2013
@@ -24,7 +24,9 @@ import java.util.List;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
 
@@ -37,6 +39,8 @@ public class TestPatternFilter {
 SubsetConfiguration empty = new ConfigBuilder().subset();
 shouldAccept(empty, anything);
 shouldAccept(empty, Arrays.asList(new MetricsTag(key, desc, value)));
+shouldAccept(empty, mockMetricsRecord(anything, Arrays.asList(
+  new MetricsTag(key, desc, value;
   }
 
   /**
@@ -49,9 +53,16 @@ public class TestPatternFilter {
 shouldAccept(wl, foo);
 shouldAccept(wl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldAccept(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
 shouldReject(wl, bar);
 shouldReject(wl, Arrays.asList(new MetricsTag(bar, , )));
 shouldReject(wl, Arrays.asList(new MetricsTag(foo, , boo)));
+shouldReject(wl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(foo, , f;
+shouldReject(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
   }
 
   /**
@@ -63,9 +74,16 @@ public class TestPatternFilter {
 .add(p.exclude.tags, foo:f).subset(p);
 shouldAccept(bl, bar);
 shouldAccept(bl, Arrays.asList(new MetricsTag(bar, , )));
+shouldAccept(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ;
 shouldReject(bl, foo);
 shouldReject(bl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldReject(bl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
+shouldReject(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
   }
 
   /**
@@ -80,10 +98,18 @@ public class TestPatternFilter {
 .add(p.exclude.tags, bar:b).subset(p);
 shouldAccept(c, foo);
 shouldAccept(c, Arrays.asList(new MetricsTag(foo, , f)));
+shouldAccept(c, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(foo, , f;
 shouldReject(c, bar);
 shouldReject(c, Arrays.asList(new

svn commit: r1466387 - in /hadoop/common/branches/branch-1.1: CHANGES.txt src/core/org/apache/hadoop/metrics2/MetricsFilter.java src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

2013-04-10 Thread llu
Author: llu
Date: Wed Apr 10 08:41:16 2013
New Revision: 1466387

URL: http://svn.apache.org/r1466387
Log:
HADOOP-9467. Metrics2 record filter should check name as well as tags. 
(Ganeshan Iyler via llu)

Modified:
hadoop/common/branches/branch-1.1/CHANGES.txt

hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java

hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

Modified: hadoop/common/branches/branch-1.1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/CHANGES.txt?rev=1466387r1=1466386r2=1466387view=diff
==
--- hadoop/common/branches/branch-1.1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.1/CHANGES.txt Wed Apr 10 08:41:16 2013
@@ -10,6 +10,9 @@ Release 1.1.3 - unreleased
 
   BUG FIXES
 
+HADOOP-9467. Metrics2 record filter should check name as well as tags.
+(Ganeshan Iyler via llu)
+
 Release 1.1.2 - 2013.01.30
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java?rev=1466387r1=1466386r2=1466387view=diff
==
--- 
hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 Wed Apr 10 08:41:16 2013
@@ -54,7 +54,7 @@ public abstract class MetricsFilter impl
* @return  true to accept; false otherwise.
*/
   public boolean accepts(MetricsRecord record) {
-return accepts(record.tags());
+return accepts(record.name()) && accepts(record.tags());
   }
 
 }

Modified: 
hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1466387r1=1466386r2=1466387view=diff
==
--- 
hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 Wed Apr 10 08:41:16 2013
@@ -24,7 +24,9 @@ import java.util.List;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
 
@@ -37,6 +39,8 @@ public class TestPatternFilter {
 SubsetConfiguration empty = new ConfigBuilder().subset();
 shouldAccept(empty, anything);
 shouldAccept(empty, Arrays.asList(new MetricsTag(key, desc, value)));
+shouldAccept(empty, mockMetricsRecord(anything, Arrays.asList(
+  new MetricsTag(key, desc, value;
   }
 
   /**
@@ -49,9 +53,16 @@ public class TestPatternFilter {
 shouldAccept(wl, foo);
 shouldAccept(wl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldAccept(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
 shouldReject(wl, bar);
 shouldReject(wl, Arrays.asList(new MetricsTag(bar, , )));
 shouldReject(wl, Arrays.asList(new MetricsTag(foo, , boo)));
+shouldReject(wl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(foo, , f;
+shouldReject(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
   }
 
   /**
@@ -63,9 +74,16 @@ public class TestPatternFilter {
 .add(p.exclude.tags, foo:f).subset(p);
 shouldAccept(bl, bar);
 shouldAccept(bl, Arrays.asList(new MetricsTag(bar, , )));
+shouldAccept(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ;
 shouldReject(bl, foo);
 shouldReject(bl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldReject(bl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
+shouldReject(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
   }
 
   /**
@@ -80,10 +98,18 @@ public class TestPatternFilter {
 .add(p.exclude.tags, bar:b).subset(p);
 shouldAccept(c, foo);
 shouldAccept(c, Arrays.asList(new MetricsTag(foo, , f)));
+shouldAccept(c, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(foo, , f;
 shouldReject(c, bar);
 shouldReject(c, Arrays.asList(new MetricsTag(bar, , b

svn commit: r1466388 - in /hadoop/common/branches/branch-1.0: CHANGES.txt src/core/org/apache/hadoop/metrics2/MetricsFilter.java src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

2013-04-10 Thread llu
Author: llu
Date: Wed Apr 10 08:49:02 2013
New Revision: 1466388

URL: http://svn.apache.org/r1466388
Log:
HADOOP-9467. Metrics2 record filter should check name as well as tags. 
(Ganeshan Iyler via llu)

Modified:
hadoop/common/branches/branch-1.0/CHANGES.txt

hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/metrics2/MetricsFilter.java

hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

Modified: hadoop/common/branches/branch-1.0/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/CHANGES.txt?rev=1466388r1=1466387r2=1466388view=diff
==
--- hadoop/common/branches/branch-1.0/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.0/CHANGES.txt Wed Apr 10 08:49:02 2013
@@ -8,6 +8,9 @@ Release 1.0.5 - unreleased
 
   BUG FIXES
 
+HADOOP-9467. Metrics2 record filter should check name as well as tags.
+(Ganeshan Iyler via llu)
+
 HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)
 
 MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.

Modified: 
hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/metrics2/MetricsFilter.java?rev=1466388r1=1466387r2=1466388view=diff
==
--- 
hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 Wed Apr 10 08:49:02 2013
@@ -54,7 +54,7 @@ public abstract class MetricsFilter impl
* @return  true to accept; false otherwise.
*/
   public boolean accepts(MetricsRecord record) {
-return accepts(record.tags());
+return accepts(record.name()) && accepts(record.tags());
   }
 
 }

Modified: 
hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1466388r1=1466387r2=1466388view=diff
==
--- 
hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 Wed Apr 10 08:49:02 2013
@@ -24,7 +24,9 @@ import java.util.List;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
 
@@ -37,6 +39,8 @@ public class TestPatternFilter {
 SubsetConfiguration empty = new ConfigBuilder().subset();
 shouldAccept(empty, anything);
 shouldAccept(empty, Arrays.asList(new MetricsTag(key, desc, value)));
+shouldAccept(empty, mockMetricsRecord(anything, Arrays.asList(
+  new MetricsTag(key, desc, value;
   }
 
   /**
@@ -49,9 +53,16 @@ public class TestPatternFilter {
 shouldAccept(wl, foo);
 shouldAccept(wl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldAccept(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
 shouldReject(wl, bar);
 shouldReject(wl, Arrays.asList(new MetricsTag(bar, , )));
 shouldReject(wl, Arrays.asList(new MetricsTag(foo, , boo)));
+shouldReject(wl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(foo, , f;
+shouldReject(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
   }
 
   /**
@@ -63,9 +74,16 @@ public class TestPatternFilter {
 .add(p.exclude.tags, foo:f).subset(p);
 shouldAccept(bl, bar);
 shouldAccept(bl, Arrays.asList(new MetricsTag(bar, , )));
+shouldAccept(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ;
 shouldReject(bl, foo);
 shouldReject(bl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldReject(bl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
+shouldReject(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
   }
 
   /**
@@ -80,10 +98,18 @@ public class TestPatternFilter {
 .add(p.exclude.tags, bar:b).subset(p);
 shouldAccept(c, foo);
 shouldAccept(c, Arrays.asList(new MetricsTag(foo, , f)));
+shouldAccept(c, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(foo, , f

svn commit: r1466391 - in /hadoop/common/branches/branch-1-win: CHANGES.branch-1-win.txt src/core/org/apache/hadoop/metrics2/MetricsFilter.java src/test/org/apache/hadoop/metrics2/filter/TestPatternFi

2013-04-10 Thread llu
Author: llu
Date: Wed Apr 10 08:54:42 2013
New Revision: 1466391

URL: http://svn.apache.org/r1466391
Log:
HADOOP-9467. Metrics2 record filter should check name as well as tags. 
(Ganeshan Iyler via llu)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/metrics2/MetricsFilter.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt?rev=1466391r1=1466390r2=1466391view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt Wed Apr 10 
08:54:42 2013
@@ -359,3 +359,6 @@ Branch-hadoop-1-win (branched from branc
 
 HDFS-4413. Secondary namenode won't start if HDFS isn't the default
 file system. (Mostafa Elhemali via suresh)
+
+HADOOP-9467. Metrics2 record filter should check name as well as tags.
+(Ganeshan Iyler via llu)

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/metrics2/MetricsFilter.java?rev=1466391r1=1466390r2=1466391view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/metrics2/MetricsFilter.java
 Wed Apr 10 08:54:42 2013
@@ -54,7 +54,7 @@ public abstract class MetricsFilter impl
* @return  true to accept; false otherwise.
*/
   public boolean accepts(MetricsRecord record) {
-return accepts(record.tags());
+return accepts(record.name()) && accepts(record.tags());
   }
 
 }

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java?rev=1466391r1=1466390r2=1466391view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/metrics2/filter/TestPatternFilter.java
 Wed Apr 10 08:54:42 2013
@@ -24,7 +24,9 @@ import java.util.List;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
+import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.ConfigBuilder;
 
@@ -37,6 +39,8 @@ public class TestPatternFilter {
 SubsetConfiguration empty = new ConfigBuilder().subset();
 shouldAccept(empty, anything);
 shouldAccept(empty, Arrays.asList(new MetricsTag(key, desc, value)));
+shouldAccept(empty, mockMetricsRecord(anything, Arrays.asList(
+  new MetricsTag(key, desc, value;
   }
 
   /**
@@ -49,9 +53,16 @@ public class TestPatternFilter {
 shouldAccept(wl, foo);
 shouldAccept(wl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldAccept(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
 shouldReject(wl, bar);
 shouldReject(wl, Arrays.asList(new MetricsTag(bar, , )));
 shouldReject(wl, Arrays.asList(new MetricsTag(foo, , boo)));
+shouldReject(wl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(foo, , f;
+shouldReject(wl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
   }
 
   /**
@@ -63,9 +74,16 @@ public class TestPatternFilter {
 .add(p.exclude.tags, foo:f).subset(p);
 shouldAccept(bl, bar);
 shouldAccept(bl, Arrays.asList(new MetricsTag(bar, , )));
+shouldAccept(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ;
 shouldReject(bl, foo);
 shouldReject(bl, Arrays.asList(new MetricsTag(bar, , ),
new MetricsTag(foo, , f)));
+shouldReject(bl, mockMetricsRecord(foo, Arrays.asList(
+  new MetricsTag(bar, , ;
+shouldReject(bl, mockMetricsRecord(bar, Arrays.asList(
+  new MetricsTag(bar, , ),
+  new MetricsTag(foo, , f;
   }
 
   /**
@@ -80,10 +98,18 @@ public class TestPatternFilter {
 .add(p.exclude.tags, bar:b).subset(p);
 shouldAccept(c, foo);
 shouldAccept(c, Arrays.asList(new MetricsTag(foo, , f)));
+shouldAccept(c

svn commit: r1464299 - in /hadoop/common/branches/branch-1.2: CHANGES.txt src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGro

2013-04-04 Thread llu
Author: llu
Date: Thu Apr  4 07:03:17 2013
New Revision: 1464299

URL: http://svn.apache.org/r1464299
Log:
HADOOP-9451. Fault single-layer config if node group topology is enabled. 
(Junping Du via llu)

Modified:
hadoop/common/branches/branch-1.2/CHANGES.txt

hadoop/common/branches/branch-1.2/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java

hadoop/common/branches/branch-1.2/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java

Modified: hadoop/common/branches/branch-1.2/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.2/CHANGES.txt?rev=1464299r1=1464298r2=1464299view=diff
==
--- hadoop/common/branches/branch-1.2/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.2/CHANGES.txt Thu Apr  4 07:03:17 2013
@@ -195,6 +195,9 @@ Release 1.2.0 - unreleased
 
   BUG FIXES
 
+HADOOP-9451. Fault single-layer config if node group topology is enabled.
+(Junping Du via llu)
+
 MAPREDUCE-4904. OTHER_LOCAL_MAPS counter is not correct.
 (Junping Du via llu)
 

Modified: 
hadoop/common/branches/branch-1.2/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.2/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java?rev=1464299r1=1464298r2=1464299view=diff
==
--- 
hadoop/common/branches/branch-1.2/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
 (original)
+++ 
hadoop/common/branches/branch-1.2/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
 Thu Apr  4 07:03:17 2013
@@ -190,7 +190,12 @@ public class NetworkTopologyWithNodeGrou
   }
   rack = getNode(nodeGroup.getNetworkLocation());
 
-  if (rack != null && !(rack instanceof InnerNode)) {
+  // rack should be an innerNode and with parent.
+  // note: rack's null parent case is: node's topology only has one layer, 
+  //   so rack is recognized as / and no parent. 
+  // This will be recognized as a node with fault topology.
+  if (rack != null && 
+  (!(rack instanceof InnerNode) || rack.getParent() == null)) {
 throw new IllegalArgumentException("Unexpected data node " 
 + node.toString() 
 + " at an illegal network location");

Modified: 
hadoop/common/branches/branch-1.2/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.2/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java?rev=1464299r1=1464298r2=1464299view=diff
==
--- 
hadoop/common/branches/branch-1.2/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
 (original)
+++ 
hadoop/common/branches/branch-1.2/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
 Thu Apr  4 07:03:17 2013
@@ -39,6 +39,8 @@ public class TestNetworkTopologyWithNode
   };
 
   private final static NodeBase computeNode = new NodeBase(/d1/r1/n1/h9);
+  
+  private final static NodeBase rackOnlyNode = new NodeBase(h10, /r1);
 
   static {
 for(int i=0; idataNodes.length; i++) {
@@ -159,5 +161,21 @@ public class TestNetworkTopologyWithNode
   assertTrue(frequency.get(key) > 0 || key == dataNodes[0]);
 }
   }
+  /**
+   * This test checks that adding a node with invalid topology will be failed 
+   * with an exception to show topology is invalid.
+   */
+  public void testAddNodeWithInvalidTopology() {
+// The last node is a node with invalid topology
+try {
+  cluster.add(rackOnlyNode);
+  fail(Exception should be thrown, so we should not have reached here.);
+} catch (Exception e) {
+  if (!(e instanceof IllegalArgumentException)) {
+fail(Expecting IllegalArgumentException, but caught: + e);
+  }
+  assertTrue(e.getMessage().contains(illegal network location));
+}
+  }
 
 }




svn commit: r1464300 - in /hadoop/common/branches/branch-1: CHANGES.txt src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup

2013-04-04 Thread llu
Author: llu
Date: Thu Apr  4 07:04:55 2013
New Revision: 1464300

URL: http://svn.apache.org/r1464300
Log:
HADOOP-9451. Fault single-layer config if node group topology is enabled. 
(Junping Du via llu)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1464300r1=1464299r2=1464300view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Thu Apr  4 07:04:55 2013
@@ -229,6 +229,9 @@ Release 1.2.0 - unreleased
 
   BUG FIXES
 
+HADOOP-9451. Fault single-layer config if node group topology is enabled.
+(Junping Du via llu)
+
 MAPREDUCE-4904. OTHER_LOCAL_MAPS counter is not correct.
 (Junping Du via llu)
 

Modified: 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java?rev=1464300r1=1464299r2=1464300view=diff
==
--- 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
 Thu Apr  4 07:04:55 2013
@@ -190,7 +190,12 @@ public class NetworkTopologyWithNodeGrou
   }
   rack = getNode(nodeGroup.getNetworkLocation());
 
-  if (rack != null && !(rack instanceof InnerNode)) {
+  // rack should be an innerNode and with parent.
+  // note: rack's null parent case is: node's topology only has one layer, 
+  //   so rack is recognized as / and no parent. 
+  // This will be recognized as a node with fault topology.
+  if (rack != null && 
+  (!(rack instanceof InnerNode) || rack.getParent() == null)) {
 throw new IllegalArgumentException("Unexpected data node " 
 + node.toString() 
 + " at an illegal network location");

Modified: 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java?rev=1464300r1=1464299r2=1464300view=diff
==
--- 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
 Thu Apr  4 07:04:55 2013
@@ -39,6 +39,8 @@ public class TestNetworkTopologyWithNode
   };
 
   private final static NodeBase computeNode = new NodeBase(/d1/r1/n1/h9);
+  
+  private final static NodeBase rackOnlyNode = new NodeBase(h10, /r1);
 
   static {
 for(int i=0; idataNodes.length; i++) {
@@ -159,5 +161,21 @@ public class TestNetworkTopologyWithNode
   assertTrue(frequency.get(key) > 0 || key == dataNodes[0]);
 }
   }
+  /**
+   * This test checks that adding a node with invalid topology will be failed 
+   * with an exception to show topology is invalid.
+   */
+  public void testAddNodeWithInvalidTopology() {
+// The last node is a node with invalid topology
+try {
+  cluster.add(rackOnlyNode);
+  fail(Exception should be thrown, so we should not have reached here.);
+} catch (Exception e) {
+  if (!(e instanceof IllegalArgumentException)) {
+fail(Expecting IllegalArgumentException, but caught: + e);
+  }
+  assertTrue(e.getMessage().contains(illegal network location));
+}
+  }
 
 }




svn commit: r1464301 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java src/test/java/org/apache/hadoop/ne

2013-04-04 Thread llu
Author: llu
Date: Thu Apr  4 07:07:59 2013
New Revision: 1464301

URL: http://svn.apache.org/r1464301
Log:
HADOOP-9451. Fault single-layer config if node group topology is enabled. 
(Junping Du via llu)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1464301r1=1464300r2=1464301view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Apr 
 4 07:07:59 2013
@@ -165,6 +165,9 @@ Trunk (Unreleased)
 
   BUG FIXES
 
+HADOOP-9451. Fault single-layer config if node group topology is enabled.
+(Junping Du via llu)
+
 HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)
 
 HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java?rev=1464301r1=1464300r2=1464301view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
 Thu Apr  4 07:07:59 2013
@@ -191,7 +191,12 @@ public class NetworkTopologyWithNodeGrou
   }
   rack = getNode(nodeGroup.getNetworkLocation());
 
-  if (rack != null && !(rack instanceof InnerNode)) {
+  // rack should be an innerNode and with parent.
+  // note: rack's null parent case is: node's topology only has one layer, 
+  //   so rack is recognized as / and no parent. 
+  // This will be recognized as a node with fault topology.
+  if (rack != null && 
+  (!(rack instanceof InnerNode) || rack.getParent() == null)) {
 throw new IllegalArgumentException("Unexpected data node " 
 + node.toString() 
 + " at an illegal network location");

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java?rev=1464301view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
 Thu Apr  4 07:07:59 2013
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.net;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.Test;
+
+public class TestNetworkTopologyWithNodeGroup {
+  private final static NetworkTopologyWithNodeGroup cluster = new 
+  NetworkTopologyWithNodeGroup();
+
+  private final static NodeBase dataNodes[] = new NodeBase[] {
+  new NodeBase(h1, /d1/r1/s1),
+  new NodeBase(h2, /d1/r1/s1),
+  new NodeBase(h3, /d1/r1/s2),
+  new NodeBase(h4, /d1/r2/s3),
+  new NodeBase(h5, /d1

svn commit: r1461353 - in /hadoop/common/branches/branch-1.2: CHANGES.txt bin/hadoop

2013-03-26 Thread llu
Author: llu
Date: Tue Mar 26 22:34:44 2013
New Revision: 1461353

URL: http://svn.apache.org/r1461353
Log:
HADOOP-9434. Backport HADOOP-9267: hadoop -h|-{0,2}help should print usage. (Yu 
Li via llu)

Modified:
hadoop/common/branches/branch-1.2/CHANGES.txt
hadoop/common/branches/branch-1.2/bin/hadoop

Modified: hadoop/common/branches/branch-1.2/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.2/CHANGES.txt?rev=1461353r1=1461352r2=1461353view=diff
==
--- hadoop/common/branches/branch-1.2/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.2/CHANGES.txt Tue Mar 26 22:34:44 2013
@@ -65,6 +65,9 @@ Release 1.2.0 - unreleased
 
   IMPROVEMENTS
 
+HADOOP-9434. Backport HADOOP-9267: hadoop -h|-{0,2}help should print usage.
+(Yu Li via llu)
+
 HDFS-3515. Port HDFS-1457 to branch-1. (eli)
 
 HDFS-3479. Port HDFS-3335 to branch-1. (Colin Patrick McCabe via eli)

Modified: hadoop/common/branches/branch-1.2/bin/hadoop
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.2/bin/hadoop?rev=1461353r1=1461352r2=1461353view=diff
==
--- hadoop/common/branches/branch-1.2/bin/hadoop (original)
+++ hadoop/common/branches/branch-1.2/bin/hadoop Tue Mar 26 22:34:44 2013
@@ -68,8 +68,8 @@ case `uname` in
 CYGWIN*) cygwin=true;;
 esac
 
-# if no args specified, show usage
-if [ $# = 0 ]; then
+print_usage()
+{
   echo Usage: hadoop [--config confdir] COMMAND
   echo where COMMAND is one of:
   echo   namenode -format format the DFS filesystem
@@ -98,6 +98,11 @@ if [ $# = 0 ]; then
   echo  or
   echo   CLASSNAMErun the class named CLASSNAME
   echo Most commands print help when invoked w/o parameters.
+}
+
+# if no args specified, show usage
+if [ $# = 0 ]; then
+  print_usage
   exit 1
 fi
 
@@ -105,6 +110,15 @@ fi
 COMMAND=$1
 shift
 
+# support help commands
+case $COMMAND in
+  # usage flags
+  --help|-help|-h|help)
+print_usage
+exit 0
+;;
+esac
+
 # Determine if we're starting a secure datanode, and if so, redefine 
appropriate variables
if [ "$COMMAND" == "datanode" ] && [ $EUID -eq 0 ] && [ -n 
"$HADOOP_SECURE_DN_USER" ]; then
   HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR




svn commit: r1461356 - in /hadoop/common/branches/branch-1: CHANGES.txt bin/hadoop

2013-03-26 Thread llu
Author: llu
Date: Tue Mar 26 22:38:57 2013
New Revision: 1461356

URL: http://svn.apache.org/r1461356
Log:
HADOOP-9434. Backport HADOOP-9267: hadoop -h|-{0,2}help should print usage. (Yu 
Li via llu)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt
hadoop/common/branches/branch-1/bin/hadoop

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1461356r1=1461355r2=1461356view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Tue Mar 26 22:38:57 2013
@@ -93,6 +93,9 @@ Release 1.2.0 - unreleased
 
   IMPROVEMENTS
 
+HADOOP-9434. Backport HADOOP-9267: hadoop -h|-{0,2}help should print usage.
+(Yu Li via llu)
+
 HDFS-3515. Port HDFS-1457 to branch-1. (eli)
 
 HDFS-3479. Port HDFS-3335 to branch-1. (Colin Patrick McCabe via eli)

Modified: hadoop/common/branches/branch-1/bin/hadoop
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/bin/hadoop?rev=1461356r1=1461355r2=1461356view=diff
==
--- hadoop/common/branches/branch-1/bin/hadoop (original)
+++ hadoop/common/branches/branch-1/bin/hadoop Tue Mar 26 22:38:57 2013
@@ -68,8 +68,8 @@ case `uname` in
 CYGWIN*) cygwin=true;;
 esac
 
-# if no args specified, show usage
-if [ $# = 0 ]; then
+print_usage()
+{
   echo Usage: hadoop [--config confdir] COMMAND
   echo where COMMAND is one of:
   echo   namenode -format format the DFS filesystem
@@ -98,6 +98,11 @@ if [ $# = 0 ]; then
   echo  or
   echo   CLASSNAMErun the class named CLASSNAME
   echo Most commands print help when invoked w/o parameters.
+}
+
+# if no args specified, show usage
+if [ $# = 0 ]; then
+  print_usage
   exit 1
 fi
 
@@ -105,6 +110,15 @@ fi
 COMMAND=$1
 shift
 
+# support help commands
+case $COMMAND in
+  # usage flags
+  --help|-help|-h|help)
+print_usage
+exit 0
+;;
+esac
+
 # Determine if we're starting a secure datanode, and if so, redefine 
appropriate variables
if [ "$COMMAND" == "datanode" ] && [ $EUID -eq 0 ] && [ -n 
"$HADOOP_SECURE_DN_USER" ]; then
   HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR




svn commit: r1461370 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2013-03-26 Thread llu
Author: llu
Date: Tue Mar 26 23:29:09 2013
New Revision: 1461370

URL: http://svn.apache.org/r1461370
Log:
HADOOP-9194. RPC Support for QoS. (Junping Du via llu)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1461370r1=1461369r2=1461370view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Mar 
26 23:29:09 2013
@@ -17,6 +17,8 @@ Trunk (Unreleased)
 
 HADOOP-9380 Add totalLength to rpc response  (sanjay Radia)
 
+HADOOP-9194. RPC Support for QoS. (Junping Du via llu)
+
   NEW FEATURES
 
 HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1461370r1=1461369r2=1461370view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Tue Mar 26 23:29:09 2013
@@ -257,6 +257,7 @@ public class Client {
 private final ConnectionId remoteId;// connection id
 private AuthMethod authMethod; // authentication method
 private Token? extends TokenIdentifier token;
+private int serviceClass;
 private SaslRpcClient saslRpcClient;
 
 private Socket socket = null; // connected socket
@@ -279,7 +280,7 @@ public class Client {
 
 private final Object sendRpcRequestLock = new Object();
 
-public Connection(ConnectionId remoteId) throws IOException {
+public Connection(ConnectionId remoteId, int serviceClass) throws 
IOException {
   this.remoteId = remoteId;
   this.server = remoteId.getAddress();
   if (server.isUnresolved()) {
@@ -296,6 +297,7 @@ public class Client {
   this.tcpNoDelay = remoteId.getTcpNoDelay();
   this.doPing = remoteId.getDoPing();
   this.pingInterval = remoteId.getPingInterval();
+  this.serviceClass = serviceClass;
   if (LOG.isDebugEnabled()) {
 LOG.debug(The ping interval is  + this.pingInterval +  ms.);
   }
@@ -747,7 +749,9 @@ public class Client {
  * +--+
  * |  hrpc 4 bytes  |  
  * +--+
- * |  Version (1 bytes)   |  
+ * |  Version (1 byte)|
+ * +--+
+ * |  Service Class (1 byte)  |
  * +--+
  * |  Authmethod (1 byte) |  
  * +--+
@@ -760,6 +764,7 @@ public class Client {
   // Write out the header, version and authentication method
   out.write(Server.HEADER.array());
   out.write(Server.CURRENT_VERSION);
+  out.write(serviceClass);
   authMethod.write(out);
   Server.IpcSerializationType.PROTOBUF.write(out);
   out.flush();
@@ -1179,20 +1184,34 @@ public class Client {
 
   
   /**
-   * Same as {@link #call(RPC.RpcKind, Writable, InetSocketAddress, 
+   * Same as {@link #call(RPC.RpcKind, Writable, InetSocketAddress,
* Class, UserGroupInformation, int, Configuration)}
* except that rpcKind is writable.
*/
-  public Writable call(Writable param, InetSocketAddress addr, 
+  public Writable call(Writable param, InetSocketAddress addr,
   Class? protocol, UserGroupInformation ticket,
-  int rpcTimeout, Configuration conf)  
+  int rpcTimeout, Configuration conf)
   throws InterruptedException, IOException {
-ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
+ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
 ticket, rpcTimeout, conf);
 return call(RPC.RpcKind.RPC_BUILTIN, param, remoteId);
   }
   
   /**
+   * Same as {@link #call(Writable, InetSocketAddress,
+   * Class, UserGroupInformation, int, Configuration

svn commit: r1425171 - in /hadoop/common/branches/branch-1: ./ src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/ src/mapred/ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/

2012-12-21 Thread llu
Author: llu
Date: Fri Dec 21 22:40:37 2012
New Revision: 1425171

URL: http://svn.apache.org/viewvc?rev=1425171&view=rev
Log:
MAPREDUCE-4660. Update task placement policy for network topology with node 
group. (Junping Du via llu)

Added:

hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/JobSchedulableWithNodeGroup.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/MiniMRClusterWithNodeGroup.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/TestNodeGroupAwareTaskPlacement.java
Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java

hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/JobSchedulable.java

hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/LocalityLevel.java
hadoop/common/branches/branch-1/src/mapred/mapred-default.xml

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/JobInProgress.java

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/JobInProgress_Counter.properties

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/JobQueueTaskScheduler.java

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/JobTracker.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/MiniMRCluster.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1425171&r1=1425170&r2=1425171&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Fri Dec 21 22:40:37 2012
@@ -6,6 +6,9 @@ Release 1.2.0 - unreleased
 
   NEW FEATURES
 
+MAPREDUCE-4660. Update task placement policy for network topology
+with node group. (Junping Du via llu)
+
 HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
 hadoop client processes. (Yu Gao via llu)
 

Modified: 
hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java?rev=1425171&r1=1425170&r2=1425171&view=diff
==
--- 
hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java
 Fri Dec 21 22:40:37 2012
@@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.serve
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
+import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
@@ -327,8 +328,17 @@ public class FairScheduler extends TaskS
 public void jobAdded(JobInProgress job) {
   synchronized (FairScheduler.this) {
 eventLog.log(JOB_ADDED, job.getJobID());
-JobInfo info = new JobInfo(new JobSchedulable(FairScheduler.this, job, 
TaskType.MAP),
-new JobSchedulable(FairScheduler.this, job, TaskType.REDUCE));
+JobSchedulable mapSched = ReflectionUtils.newInstance(
+conf.getClass(mapred.jobtracker.jobSchedulable, 
JobSchedulable.class,
+JobSchedulable.class), conf);
+mapSched.init(FairScheduler.this, job, TaskType.MAP);
+
+JobSchedulable redSched = ReflectionUtils.newInstance(
+conf.getClass(mapred.jobtracker.jobSchedulable, 
JobSchedulable.class,
+JobSchedulable.class), conf);
+redSched.init(FairScheduler.this, job, TaskType.REDUCE);
+
+JobInfo info = new JobInfo(mapSched, redSched);
 infos.put(job, info);
 poolMgr.addJob(job); // Also adds job into the right PoolScheduable
 update();
@@ -585,8 +595,10 @@ public class FairScheduler extends TaskS
   private void updateLastMapLocalityLevel(JobInProgress job,
   Task mapTaskLaunched, TaskTrackerStatus tracker) {
 JobInfo info = infos.get(job);
+boolean isNodeGroupAware = conf.getBoolean(
+net.topology.nodegroup.aware, false);
 LocalityLevel localityLevel = LocalityLevel.fromTask(
-job, mapTaskLaunched, tracker);
+job, mapTaskLaunched, tracker, isNodeGroupAware);
 info.lastMapLocalityLevel = localityLevel;
 info.timeWaitedForLocalMap = 0;
 eventLog.log(ASSIGNED_LOC_LEVEL, job.getJobID(), localityLevel);

Modified: 
hadoop/common/branches/branch-1/src/contrib

svn commit: r1425183 - in /hadoop/common/branches/branch-1.0: CHANGES.txt src/mapred/org/apache/hadoop/mapred/SortedRanges.java

2012-12-21 Thread llu
Author: llu
Date: Fri Dec 21 23:17:46 2012
New Revision: 1425183

URL: http://svn.apache.org/viewvc?rev=1425183&view=rev
Log:
MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.  (Yu Gao 
via llu)

Modified:
hadoop/common/branches/branch-1.0/CHANGES.txt

hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/SortedRanges.java

Modified: hadoop/common/branches/branch-1.0/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/CHANGES.txt?rev=1425183&r1=1425182&r2=1425183&view=diff
==
--- hadoop/common/branches/branch-1.0/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.0/CHANGES.txt Fri Dec 21 23:17:46 2012
@@ -8,6 +8,9 @@ Release 1.0.5 - unreleased
 
   BUG FIXES
 
+MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.
+(Yu Gao via llu)
+
 HADOOP-9051. Fix ant clean/test with circular symlinks in build dir. (llu)
 
 MAPREDUCE-4396. Port support private distributed cache to

Modified: 
hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/SortedRanges.java?rev=1425183&r1=1425182&r2=1425183&view=diff
==
--- 
hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
 (original)
+++ 
hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
 Fri Dec 21 23:17:46 2012
@@ -271,7 +271,7 @@ class SortedRanges implements Writable{
 }
 
 public boolean equals(Object o) {
-  if(o!=null  o instanceof Range) {
+  if (o instanceof Range) {
 Range range = (Range)o;
 return startIndex==range.startIndex 
 length==range.length;
@@ -285,10 +285,11 @@ class SortedRanges implements Writable{
 }
 
 public int compareTo(Range o) {
-  if(this.equals(o)) {
-return 0;
-  }
-  return (this.startIndex  o.startIndex) ? 1:-1;
+  // Ensure sgn(x.compareTo(y) == -sgn(y.compareTo(x))
+  return this.startIndex  o.startIndex ? -1 :
+  (this.startIndex  o.startIndex ? 1 :
+  (this.length  o.length ? -1 :
+  (this.length  o.length ? 1 : 0)));
 }
 
 public void readFields(DataInput in) throws IOException {




svn commit: r1425185 - in /hadoop/common/branches/branch-1.1: CHANGES.txt src/mapred/org/apache/hadoop/mapred/SortedRanges.java

2012-12-21 Thread llu
Author: llu
Date: Fri Dec 21 23:19:09 2012
New Revision: 1425185

URL: http://svn.apache.org/viewvc?rev=1425185&view=rev
Log:
MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.  (Yu Gao 
via llu)

Modified:
hadoop/common/branches/branch-1.1/CHANGES.txt

hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java

Modified: hadoop/common/branches/branch-1.1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/CHANGES.txt?rev=1425185&r1=1425184&r2=1425185&view=diff
==
--- hadoop/common/branches/branch-1.1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.1/CHANGES.txt Fri Dec 21 23:19:09 2012
@@ -19,6 +19,9 @@ Release 1.1.2 - 2012.12.07
 
   BUG FIXES
 
+MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.
+(Yu Gao via llu)
+
 HADOOP-9051 Fix ant clean/test with circular symlinks in build dir. (llu)
 
 MAPREDUCE-4396. Port support private distributed cache to

Modified: 
hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java?rev=1425185&r1=1425184&r2=1425185&view=diff
==
--- 
hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
 (original)
+++ 
hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
 Fri Dec 21 23:19:09 2012
@@ -271,7 +271,7 @@ class SortedRanges implements Writable{
 }
 
 public boolean equals(Object o) {
-  if(o!=null  o instanceof Range) {
+  if (o instanceof Range) {
 Range range = (Range)o;
 return startIndex==range.startIndex 
 length==range.length;
@@ -285,10 +285,11 @@ class SortedRanges implements Writable{
 }
 
 public int compareTo(Range o) {
-  if(this.equals(o)) {
-return 0;
-  }
-  return (this.startIndex  o.startIndex) ? 1:-1;
+  // Ensure sgn(x.compareTo(y) == -sgn(y.compareTo(x))
+  return this.startIndex  o.startIndex ? -1 :
+  (this.startIndex  o.startIndex ? 1 :
+  (this.length  o.length ? -1 :
+  (this.length  o.length ? 1 : 0)));
 }
 
 public void readFields(DataInput in) throws IOException {




svn commit: r1425187 - in /hadoop/common/branches/branch-1: CHANGES.txt src/mapred/org/apache/hadoop/mapred/SortedRanges.java

2012-12-21 Thread llu
Author: llu
Date: Fri Dec 21 23:20:28 2012
New Revision: 1425187

URL: http://svn.apache.org/viewvc?rev=1425187&view=rev
Log:
MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.  (Yu Gao 
via llu)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1425187&r1=1425186&r2=1425187&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Fri Dec 21 23:20:28 2012
@@ -161,6 +161,9 @@ Release 1.2.0 - unreleased
 
   BUG FIXES
 
+MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.
+(Yu Gao via llu)
+
 HADOOP-9051 Fix ant clean/test with circular symlinks in build dir. (llu)
 
 MAPREDUCE-4396. Port support private distributed cache to 

Modified: 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java?rev=1425187&r1=1425186&r2=1425187&view=diff
==
--- 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/SortedRanges.java
 Fri Dec 21 23:20:28 2012
@@ -271,7 +271,7 @@ class SortedRanges implements Writable{
 }
 
 public boolean equals(Object o) {
-  if(o!=null  o instanceof Range) {
+  if (o instanceof Range) {
 Range range = (Range)o;
 return startIndex==range.startIndex 
 length==range.length;
@@ -285,10 +285,11 @@ class SortedRanges implements Writable{
 }
 
 public int compareTo(Range o) {
-  if(this.equals(o)) {
-return 0;
-  }
-  return (this.startIndex  o.startIndex) ? 1:-1;
+  // Ensure sgn(x.compareTo(y) == -sgn(y.compareTo(x))
+  return this.startIndex  o.startIndex ? -1 :
+  (this.startIndex  o.startIndex ? 1 :
+  (this.length  o.length ? -1 :
+  (this.length  o.length ? 1 : 0)));
 }
 
 public void readFields(DataInput in) throws IOException {




svn commit: r1425193 - in /hadoop/common/branches/branch-1.1: CHANGES.txt src/c++/task-controller/impl/configuration.c src/c++/task-controller/impl/configuration.h src/c++/task-controller/impl/main.c

2012-12-21 Thread llu
Author: llu
Date: Fri Dec 21 23:29:30 2012
New Revision: 1425193

URL: http://svn.apache.org/viewvc?rev=1425193&view=rev
Log:
MAPREDUCE-4397. Introduce HADOOP_SECURITY_CONF_DIR for task-controller. (Yu Gao 
via llu)

Modified:
hadoop/common/branches/branch-1.1/CHANGES.txt

hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.c

hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.h
hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/main.c

hadoop/common/branches/branch-1.1/src/c++/task-controller/test/test-task-controller.c

Modified: hadoop/common/branches/branch-1.1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/CHANGES.txt?rev=1425193&r1=1425192&r2=1425193&view=diff
==
--- hadoop/common/branches/branch-1.1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.1/CHANGES.txt Fri Dec 21 23:29:30 2012
@@ -6,6 +6,9 @@ Release 1.1.2 - 2012.12.07
 
   NEW FEATURES
 
+MAPREUDCE-4397. Introduce HADOOP_SECURITY_CONF_DIR for task-controller.
+(Yu Gao via llu)
+
 HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
 hadoop client processes. (Yu Gao via llu)
 

Modified: 
hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/c%2B%2B/task-controller/impl/configuration.c?rev=1425193&r1=1425192&r2=1425193&view=diff
==
--- 
hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.c 
(original)
+++ 
hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.c 
Fri Dec 21 23:29:30 2012
@@ -88,6 +88,29 @@ static int is_only_root_writable(const c
 }
 
 /**
+ * Get the full path of the configuration file.
+ * Use $HADOOP_SECURITY_CONF_DIR for the configuration directory, and if
+ * it's not set, use the default value in default_conf_dir.
+ */
+void get_config_path(char* conf_file_path, int size,
+ char* default_conf_dir,
+ const char* conf_file_name) {
+  if (conf_file_name == NULL) {
+fprintf(LOGFILE, Null configuration filename passed in\n);
+exit(INVALID_CONFIG_FILE);
+  }
+  char *orig_conf_dir = getenv(HADOOP_SECURITY_CONF_DIR);
+  if (orig_conf_dir == NULL) {
+if (default_conf_dir == NULL) {
+  fprintf(LOGFILE, Null default configuration directory passed in\n);
+  exit(INVALID_CONFIG_FILE);
+}
+orig_conf_dir = default_conf_dir;
+  }
+  snprintf(conf_file_path, size, %s/%s, orig_conf_dir, conf_file_name);
+}
+
+/**
  * Ensure that the configuration file and all of the containing directories
  * are only writable by root. Otherwise, an attacker can change the 
  * configuration and potentially cause damage.

Modified: 
hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.h
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/c%2B%2B/task-controller/impl/configuration.h?rev=1425193&r1=1425192&r2=1425193&view=diff
==
--- 
hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.h 
(original)
+++ 
hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/configuration.h 
Fri Dec 21 23:29:30 2012
@@ -17,6 +17,15 @@
  */
 
 /**
+ * Get the full path of the configuration file.
+ * Use $HADOOP_SECURITY_CONF_DIR for the configuration directory, and if
+ * it's not set, use the default value in default_conf_dir.
+ */
+void get_config_path(char* conf_file_path, int size,
+ char* default_conf_dir,
+ const char* conf_file_name);
+
+/**
  * Ensure that the configuration file and all of the containing directories
  * are only writable by root. Otherwise, an attacker can change the 
  * configuration and potentially cause damage.

Modified: hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/main.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/c%2B%2B/task-controller/impl/main.c?rev=1425193&r1=1425192&r2=1425193&view=diff
==
--- hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/main.c 
(original)
+++ hadoop/common/branches/branch-1.1/src/c++/task-controller/impl/main.c Fri 
Dec 21 23:29:30 2012
@@ -80,7 +80,11 @@ int main(int argc, char **argv) {
   #error HADOOP_CONF_DIR must be defined
 #endif
 
-  char *orig_conf_file = STRINGIFY(HADOOP_CONF_DIR) / CONF_FILENAME;
+  char orig_conf_file[PATH_MAX + 1]; // realpath is limitted by PATH_MAX
+  orig_conf_file[PATH_MAX] = 0; // in case of snprintf error
+  get_config_path(orig_conf_file, PATH_MAX + 1,
+  STRINGIFY(HADOOP_CONF_DIR),
+  CONF_FILENAME);
   char

svn commit: r1422429 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/security/UserGroupInformation.java test/java/org/apache/hadoop/security/TestProxyUse

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 00:38:59 2012
New Revision: 1422429

URL: http://svn.apache.org/viewvc?rev=1422429&view=rev
Log:
HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child 
hadoop client processes. (Yu Gao via llu)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java
Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1422429&r1=1422428&r2=1422429&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 Sun Dec 16 00:38:59 2012
@@ -81,6 +81,7 @@ public class UserGroupInformation {
*/
   private static final float TICKET_RENEW_WINDOW = 0.80f;
   static final String HADOOP_USER_NAME = HADOOP_USER_NAME;
+  static final String HADOOP_PROXY_USER = HADOOP_PROXY_USER;
   
   /** 
* UgiMetrics maintains UGI activity statistics
@@ -641,10 +642,18 @@ public class UserGroupInformation {
 newLoginContext(authenticationMethod.getLoginAppName(), 
 subject, new HadoopConfiguration());
 login.login();
-loginUser = new UserGroupInformation(subject);
-loginUser.setLogin(login);
-loginUser.setAuthenticationMethod(authenticationMethod);
-loginUser = new UserGroupInformation(login.getSubject());
+UserGroupInformation realUser = new UserGroupInformation(subject);
+realUser.setLogin(login);
+realUser.setAuthenticationMethod(authenticationMethod);
+realUser = new UserGroupInformation(login.getSubject());
+// If the HADOOP_PROXY_USER environment variable or property
+// is specified, create a proxy user as the logged in user.
+String proxyUser = System.getenv(HADOOP_PROXY_USER);
+if (proxyUser == null) {
+  proxyUser = System.getProperty(HADOOP_PROXY_USER);
+}
+loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, 
realUser);
+
 String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
 if (fileLocation != null) {
   // load the token storage file and put all of the tokens into the

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java?rev=1422429&view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java
 Sun Dec 16 00:38:59 2012
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations 
under
+ * the License.
+ */
+
+package org.apache.hadoop.security;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.junit.Test;
+
+public class TestProxyUserFromEnv {
+  /** Test HADOOP_PROXY_USER for impersonation */
+  @Test
+  public void testProxyUserFromEnvironment() throws IOException {
+String proxyUser = foo.bar;
+System.setProperty(UserGroupInformation.HADOOP_PROXY_USER, proxyUser);
+UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+assertEquals(proxyUser, ugi.getUserName());
+
+UserGroupInformation realUgi

svn commit: r1422430 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java test/java/org/apache/hadoop/fs/TestFileSystemIniti

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 00:39:17 2012
New Revision: 1422430

URL: http://svn.apache.org/viewvc?rev=1422430&view=rev
Log:
HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in 
FileSystem initialization. (Yanbo Liang and Radim Kolar via llu)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java?rev=1422430&r1=1422429&r2=1422430&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
 Sun Dec 16 00:39:17 2012
@@ -56,6 +56,12 @@ public class FsUrlStreamHandlerFactory i
 
   public FsUrlStreamHandlerFactory(Configuration conf) {
 this.conf = new Configuration(conf);
+// force init of FileSystem code to avoid HADOOP-9041
+try {
+  FileSystem.getFileSystemClass(file, conf);
+} catch (IOException io) {
+  throw new RuntimeException(io);
+}
 this.handler = new FsUrlStreamHandler(this.conf);
   }
 

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java?rev=1422430&view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
 Sun Dec 16 00:39:17 2012
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import org.apache.hadoop.conf.Configuration;
+
+import java.io.IOException;
+import java.net.URL;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestFileSystemInitialization {
+
+ /**
+   * Check if FileSystem can be properly initialized if URLStreamHandlerFactory
+   * is registered.
+   */
+  @Test
+  public void testInitializationWithRegisteredStreamFactory() {
+Configuration conf = new Configuration();
+URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory(conf));
+try {
+  FileSystem.getFileSystemClass(file, conf);
+}
+catch (IOException ok) {
+  // we might get an exception but this not related to infinite loop 
problem
+  assertFalse(false);
+}
+  }
+}




svn commit: r1422431 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 00:39:27 2012
New Revision: 1422431

URL: http://svn.apache.org/viewvc?rev=1422431&view=rev
Log:
Update CHANGES.txt separately for clean cherry-picks.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1422431&r1=1422430&r2=1422431&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Sun Dec 
16 00:39:27 2012
@@ -19,6 +19,9 @@ Trunk (Unreleased)
 
   IMPROVEMENTS
 
+HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
+hadoop client processes. (Yu Gao via llu)
+
 HADOOP-8017. Configure hadoop-main pom to get rid of M2E plugin execution
 not covered (Eric Charles via bobby)
 
@@ -143,6 +146,9 @@ Trunk (Unreleased)
 
   BUG FIXES
 
+HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in
+FileSystem initialization. (Yanbo Liang and Radim Kolar via llu)
+
 HADOOP-8418. Update UGI Principal classes name for running with
 IBM JDK on 64 bits Windows.  (Yu Gao via eyang)
 




svn commit: r1422435 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java test/java/org/apache/hadoop/fs/TestFil

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 00:49:55 2012
New Revision: 1422435

URL: http://svn.apache.org/viewvc?rev=1422435&view=rev
Log:
HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in 
FileSystem initialization. (Yanbo Liang and Radim Kolar via llu)

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java?rev=1422435&r1=1422434&r2=1422435&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
 Sun Dec 16 00:49:55 2012
@@ -56,6 +56,12 @@ public class FsUrlStreamHandlerFactory i
 
   public FsUrlStreamHandlerFactory(Configuration conf) {
 this.conf = new Configuration(conf);
+// force init of FileSystem code to avoid HADOOP-9041
+try {
+  FileSystem.getFileSystemClass(file, conf);
+} catch (IOException io) {
+  throw new RuntimeException(io);
+}
 this.handler = new FsUrlStreamHandler(this.conf);
   }
 

Added: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java?rev=1422435&view=auto
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
 (added)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
 Sun Dec 16 00:49:55 2012
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import org.apache.hadoop.conf.Configuration;
+
+import java.io.IOException;
+import java.net.URL;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestFileSystemInitialization {
+
+ /**
+   * Check if FileSystem can be properly initialized if URLStreamHandlerFactory
+   * is registered.
+   */
+  @Test
+  public void testInitializationWithRegisteredStreamFactory() {
+Configuration conf = new Configuration();
+URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory(conf));
+try {
+  FileSystem.getFileSystemClass(file, conf);
+}
+catch (IOException ok) {
+  // we might get an exception but this not related to infinite loop 
problem
+  assertFalse(false);
+}
+  }
+}




svn commit: r1422436 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 00:50:06 2012
New Revision: 1422436

URL: http://svn.apache.org/viewvc?rev=1422436&view=rev
Log:
Update CHANGES.txt separately for clean cherry-picks.

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1422436&r1=1422435&r2=1422436&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Sun Dec 16 00:50:06 2012
@@ -6,6 +6,9 @@ Release 2.0.3-alpha - Unreleased 
 
   NEW FEATURES
 
+HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
+hadoop client processes. (Yu Gao via llu)
+
 HADOOP-8597. Permit FsShell's text command to read Avro files.
 (Ivan Vladimirov Ivanov via cutting)
 
@@ -108,6 +111,9 @@ Release 2.0.3-alpha - Unreleased 
 
   BUG FIXES
 
+HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in
+FileSystem initialization. (Yanbo Liang and Radim Kolar via llu)
+
 HADOOP-8418. Update UGI Principal classes name for running with
 IBM JDK on 64 bits Windows.  (Yu Gao via eyang)
 




svn commit: r1422438 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 00:51:43 2012
New Revision: 1422438

URL: http://svn.apache.org/viewvc?rev=1422438&view=rev
Log:
Move to the right section in CHANGES.txt

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1422438&r1=1422437&r2=1422438&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Sun Dec 
16 00:51:43 2012
@@ -11,6 +11,9 @@ Trunk (Unreleased)
 
   NEW FEATURES
 
+HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
+hadoop client processes. (Yu Gao via llu)
+
 HADOOP-8469. Make NetworkTopology class pluggable.  (Junping Du via
 szetszwo)
 
@@ -19,9 +22,6 @@ Trunk (Unreleased)
 
   IMPROVEMENTS
 
-HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
-hadoop client processes. (Yu Gao via llu)
-
 HADOOP-8017. Configure hadoop-main pom to get rid of M2E plugin execution
 not covered (Eric Charles via bobby)
 




svn commit: r1422441 - in /hadoop/common/branches/branch-1/src: core/org/apache/hadoop/security/UserGroupInformation.java test/org/apache/hadoop/security/TestProxyUserFromEnv.java

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 01:01:03 2012
New Revision: 1422441

URL: http://svn.apache.org/viewvc?rev=1422441&view=rev
Log:
HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child 
hadoop client processes. (Yu Gao via llu)

Added:

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
Modified:

hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/UserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/UserGroupInformation.java?rev=1422441&r1=1422440&r2=1422441&view=diff
==
--- 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/UserGroupInformation.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/core/org/apache/hadoop/security/UserGroupInformation.java
 Sun Dec 16 01:01:03 2012
@@ -69,6 +69,7 @@ public class UserGroupInformation {
*/
   private static final float TICKET_RENEW_WINDOW = 0.80f;
   static final String HADOOP_USER_NAME = HADOOP_USER_NAME;
+  static final String HADOOP_PROXY_USER = HADOOP_PROXY_USER;
   
   /**
* A login module that looks at the Kerberos, Unix, or Windows principal and
@@ -484,12 +485,20 @@ public class UserGroupInformation {
   login = newLoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME, 
subject);
 }
 login.login();
-loginUser = new UserGroupInformation(subject);
-loginUser.setLogin(login);
-loginUser.setAuthenticationMethod(isSecurityEnabled() ?
+UserGroupInformation realUser = new UserGroupInformation(subject);
+realUser.setLogin(login);
+realUser.setAuthenticationMethod(isSecurityEnabled() ?
   AuthenticationMethod.KERBEROS :
   AuthenticationMethod.SIMPLE);
-loginUser = new UserGroupInformation(login.getSubject());
+realUser = new UserGroupInformation(login.getSubject());
+// If the HADOOP_PROXY_USER environment variable or property
+// is specified, create a proxy user as the logged in user.
+String proxyUser = System.getenv(HADOOP_PROXY_USER);
+if (proxyUser == null) {
+  proxyUser = System.getProperty(HADOOP_PROXY_USER);
+}
+loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, 
realUser);
+
 String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
 if (fileLocation != null  isSecurityEnabled()) {
   // load the token storage file and put all of the tokens into the

Added: 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java?rev=1422441&view=auto
==
--- 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
 (added)
+++ 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
 Sun Dec 16 01:01:03 2012
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations 
under
+ * the License.
+ */
+
+package org.apache.hadoop.security;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.junit.Test;
+
+public class TestProxyUserFromEnv {
+  /** Test HADOOP_PROXY_USER for impersonation */
+  @Test
+  public void testProxyUserFromEnvironment() throws IOException {
+String proxyUser = foo.bar;
+System.setProperty(UserGroupInformation.HADOOP_PROXY_USER, proxyUser);
+UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+assertEquals(proxyUser, ugi.getUserName());
+
+UserGroupInformation realUgi = ugi.getRealUser();
+assertNotNull(realUgi);
+// get the expected real user name
+Process pp = Runtime.getRuntime().exec

svn commit: r1422443 - in /hadoop/common/branches/branch-1.1/src: core/org/apache/hadoop/security/UserGroupInformation.java test/org/apache/hadoop/security/TestProxyUserFromEnv.java

2012-12-15 Thread llu
Author: llu
Date: Sun Dec 16 01:01:54 2012
New Revision: 1422443

URL: http://svn.apache.org/viewvc?rev=1422443&view=rev
Log:
HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child 
hadoop client processes. (Yu Gao via llu)

Added:

hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
Modified:

hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/security/UserGroupInformation.java

Modified: 
hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/security/UserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/security/UserGroupInformation.java?rev=1422443&r1=1422442&r2=1422443&view=diff
==
--- 
hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/security/UserGroupInformation.java
 (original)
+++ 
hadoop/common/branches/branch-1.1/src/core/org/apache/hadoop/security/UserGroupInformation.java
 Sun Dec 16 01:01:54 2012
@@ -69,6 +69,7 @@ public class UserGroupInformation {
*/
   private static final float TICKET_RENEW_WINDOW = 0.80f;
   static final String HADOOP_USER_NAME = HADOOP_USER_NAME;
+  static final String HADOOP_PROXY_USER = HADOOP_PROXY_USER;
   
   /**
* A login module that looks at the Kerberos, Unix, or Windows principal and
@@ -484,12 +485,20 @@ public class UserGroupInformation {
   login = newLoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME, 
subject);
 }
 login.login();
-loginUser = new UserGroupInformation(subject);
-loginUser.setLogin(login);
-loginUser.setAuthenticationMethod(isSecurityEnabled() ?
+UserGroupInformation realUser = new UserGroupInformation(subject);
+realUser.setLogin(login);
+realUser.setAuthenticationMethod(isSecurityEnabled() ?
   AuthenticationMethod.KERBEROS :
   AuthenticationMethod.SIMPLE);
-loginUser = new UserGroupInformation(login.getSubject());
+realUser = new UserGroupInformation(login.getSubject());
+// If the HADOOP_PROXY_USER environment variable or property
+// is specified, create a proxy user as the logged in user.
+String proxyUser = System.getenv(HADOOP_PROXY_USER);
+if (proxyUser == null) {
+  proxyUser = System.getProperty(HADOOP_PROXY_USER);
+}
+loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, 
realUser);
+
 String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
 if (fileLocation != null  isSecurityEnabled()) {
   // load the token storage file and put all of the tokens into the

Added: 
hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java?rev=1422443&view=auto
==
--- 
hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
 (added)
+++ 
hadoop/common/branches/branch-1.1/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
 Sun Dec 16 01:01:54 2012
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations 
under
+ * the License.
+ */
+
+package org.apache.hadoop.security;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.junit.Test;
+
+public class TestProxyUserFromEnv {
+  /** Test HADOOP_PROXY_USER for impersonation */
+  @Test
+  public void testProxyUserFromEnvironment() throws IOException {
+String proxyUser = foo.bar;
+System.setProperty(UserGroupInformation.HADOOP_PROXY_USER, proxyUser);
+UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+assertEquals(proxyUser, ugi.getUserName());
+
+UserGroupInformation realUgi = ugi.getRealUser();
+assertNotNull(realUgi);
+// get the expected real user name
+Process pp

svn commit: r1422178 - /hadoop/common/branches/branch-1/build.xml

2012-12-14 Thread llu
Author: llu
Date: Sat Dec 15 03:16:17 2012
New Revision: 1422178

URL: http://svn.apache.org/viewvc?rev=1422178&view=rev
Log:
HADOOP-9051 Fix ant clean/test with circular symlinks in build dir. (llu)

Modified:
hadoop/common/branches/branch-1/build.xml

Modified: hadoop/common/branches/branch-1/build.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/build.xml?rev=1422178&r1=1422177&r2=1422178&view=diff
==
--- hadoop/common/branches/branch-1/build.xml (original)
+++ hadoop/common/branches/branch-1/build.xml Sat Dec 15 03:16:17 2012
@@ -1054,7 +1054,7 @@
   delete file=${test.build.dir}/testsfailed/
   delete dir=@{test.dir}/data /
   mkdir dir=@{test.dir}/data /
-  delete dir=@{test.dir}/logs /
+  delete dir=@{test.dir}/logs followSymlinks=false/
   mkdir dir=@{test.dir}/logs /
   copy file=${test.src.dir}/hadoop-policy.xml
 todir=@{test.dir}/extraconf /
@@ -1971,7 +1971,7 @@
   !-- Clean.  Delete the build files, and their directories  --
   !-- == --
   target name=clean depends=clean-contrib, clean-sign, clean-fi 
description=Clean.  Delete the build files, and their directories
-delete dir=${build.dir}/
+delete dir=${build.dir} followSymlinks=false/
 delete dir=${docs.src}/build/
 delete dir=${src.docs.cn}/build/
 delete dir=${package.buildroot}/




svn commit: r1422179 - /hadoop/common/branches/branch-1.1/build.xml

2012-12-14 Thread llu
Author: llu
Date: Sat Dec 15 03:18:45 2012
New Revision: 1422179

URL: http://svn.apache.org/viewvc?rev=1422179&view=rev
Log:
HADOOP-9051 Fix ant clean/test with circular symlinks in build dir. (llu)

Modified:
hadoop/common/branches/branch-1.1/build.xml

Modified: hadoop/common/branches/branch-1.1/build.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/build.xml?rev=1422179&r1=1422178&r2=1422179&view=diff
==
--- hadoop/common/branches/branch-1.1/build.xml (original)
+++ hadoop/common/branches/branch-1.1/build.xml Sat Dec 15 03:18:45 2012
@@ -1047,7 +1047,7 @@
   delete file=${test.build.dir}/testsfailed/
   delete dir=@{test.dir}/data /
   mkdir dir=@{test.dir}/data /
-  delete dir=@{test.dir}/logs /
+  delete dir=@{test.dir}/logs followSymlinks=false/
   mkdir dir=@{test.dir}/logs /
   copy file=${test.src.dir}/hadoop-policy.xml
 todir=@{test.dir}/extraconf /
@@ -1964,7 +1964,7 @@
   !-- Clean.  Delete the build files, and their directories  --
   !-- == --
   target name=clean depends=clean-contrib, clean-sign, clean-fi 
description=Clean.  Delete the build files, and their directories
-delete dir=${build.dir}/
+delete dir=${build.dir} followSymlinks=false/
 delete dir=${docs.src}/build/
 delete dir=${src.docs.cn}/build/
 delete dir=${package.buildroot}/




svn commit: r1422180 - /hadoop/common/branches/branch-1.0/build.xml

2012-12-14 Thread llu
Author: llu
Date: Sat Dec 15 03:21:28 2012
New Revision: 1422180

URL: http://svn.apache.org/viewvc?rev=1422180&view=rev
Log:
HADOOP-9051 Fix ant clean/test with circular symlinks in build dir. (llu)

Modified:
hadoop/common/branches/branch-1.0/build.xml

Modified: hadoop/common/branches/branch-1.0/build.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/build.xml?rev=1422180&r1=1422179&r2=1422180&view=diff
==
--- hadoop/common/branches/branch-1.0/build.xml (original)
+++ hadoop/common/branches/branch-1.0/build.xml Sat Dec 15 03:21:28 2012
@@ -1045,7 +1045,7 @@
   delete file=${test.build.dir}/testsfailed/
   delete dir=@{test.dir}/data /
   mkdir dir=@{test.dir}/data /
-  delete dir=@{test.dir}/logs /
+  delete dir=@{test.dir}/logs followSymlinks=false/
   mkdir dir=@{test.dir}/logs /
   copy file=${test.src.dir}/hadoop-policy.xml
 todir=@{test.dir}/extraconf /
@@ -1959,7 +1959,7 @@
   !-- Clean.  Delete the build files, and their directories  --
   !-- == --
   target name=clean depends=clean-contrib, clean-sign, clean-fi 
description=Clean.  Delete the build files, and their directories
-delete dir=${build.dir}/
+delete dir=${build.dir} followSymlinks=false/
 delete dir=${docs.src}/build/
 delete dir=${src.docs.cn}/build/
 delete dir=${package.buildroot}/




svn commit: r1422182 - /hadoop/common/branches/branch-1.1/CHANGES.txt

2012-12-14 Thread llu
Author: llu
Date: Sat Dec 15 03:43:45 2012
New Revision: 1422182

URL: http://svn.apache.org/viewvc?rev=1422182&view=rev
Log:
Update CHANGES.txt separately for clean cherry-pick.

Modified:
hadoop/common/branches/branch-1.1/CHANGES.txt

Modified: hadoop/common/branches/branch-1.1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/CHANGES.txt?rev=1422182&r1=1422181&r2=1422182&view=diff
==
--- hadoop/common/branches/branch-1.1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.1/CHANGES.txt Sat Dec 15 03:43:45 2012
@@ -16,6 +16,8 @@ Release 1.1.2 - 2012.12.07
 
   BUG FIXES
 
+HADOOP-9051 Fix ant clean/test with circular symlinks in build dir. (llu)
+
 MAPREDUCE-4396. Port support private distributed cache to
 LocalJobRunner from branch-1-win. (Yu Gao via eyang)
 




svn commit: r1422183 - /hadoop/common/branches/branch-1/CHANGES.txt

2012-12-14 Thread llu
Author: llu
Date: Sat Dec 15 03:47:38 2012
New Revision: 1422183

URL: http://svn.apache.org/viewvc?rev=1422183&view=rev
Log:
Update CHANGES.txt separately for clean cherry-pick.

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1422183&r1=1422182&r2=1422183&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Sat Dec 15 03:47:38 2012
@@ -142,6 +142,8 @@ Release 1.2.0 - unreleased
 
   BUG FIXES
 
+HADOOP-9051 Fix ant clean/test with circular symlinks in build dir. (llu)
+
 MAPREDUCE-4396. Port support private distributed cache to 
 LocalJobRunner from branch-1-win. (Yu Gao via eyang)
 




svn commit: r1365851 - in /hadoop/common/branches/branch-1.1: CHANGES.txt src/mapred/org/apache/hadoop/mapred/Task.java

2012-07-25 Thread llu
Author: llu
Date: Thu Jul 26 00:08:36 2012
New Revision: 1365851

URL: http://svn.apache.org/viewvc?rev=1365851&view=rev
Log:
MAPREDUCE-4400: Avoid task finish sleep to improve small job/workflow latency. 
(llu)

Modified:
hadoop/common/branches/branch-1.1/CHANGES.txt

hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/Task.java

Modified: hadoop/common/branches/branch-1.1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/CHANGES.txt?rev=1365851&r1=1365850&r2=1365851&view=diff
==
--- hadoop/common/branches/branch-1.1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.1/CHANGES.txt Thu Jul 26 00:08:36 2012
@@ -143,6 +143,9 @@ Release 1.1.0 - 2012.07.09
 MAPREDUCE-4399. Change the Jetty response buffer size to improve 
 shuffle performance. (Luke Lu via suresh)
 
+MAPREDUCE-4400. Avoid task finish sleep to improve small job/workflow
+latency. (llu)
+
   BUG FIXES
 
 MAPREDUCE-4087. [Gridmix] GenerateDistCacheData job of Gridmix can

Modified: 
hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/Task.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/Task.java?rev=1365851r1=1365850r2=1365851view=diff
==
--- 
hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/Task.java 
(original)
+++ 
hadoop/common/branches/branch-1.1/src/mapred/org/apache/hadoop/mapred/Task.java 
Thu Jul 26 00:08:36 2012
@@ -649,14 +649,19 @@ abstract public class Task implements Wr
   // get current flag value and reset it as well
   boolean sendProgress = resetProgressFlag();
   while (!taskDone.get()) {
-synchronized(lock) {
-  done = false;
-}
 try {
   boolean taskFound = true; // whether TT knows about this task
   // sleep for a bit
   try {
-Thread.sleep(PROGRESS_INTERVAL);
+synchronized(lock) {
+  done = false;
+  lock.wait(PROGRESS_INTERVAL);
+  if (taskDone.get()) {
+done = true;
+lock.notify();
+return;
+  }
+}
   } 
   catch (InterruptedException e) {
 if (LOG.isDebugEnabled()) {
@@ -724,6 +729,7 @@ abstract public class Task implements Wr
   // Updating resources specified in ResourceCalculatorPlugin
   if (pingThread != null) {
 synchronized(lock) {
+  lock.notify(); // wake up the wait in the while loop
   while(!done) {
 lock.wait();
   }




svn commit: r1365331 - in /hadoop/common/branches/branch-1: CHANGES.txt src/mapred/org/apache/hadoop/mapred/Task.java

2012-07-24 Thread llu
Author: llu
Date: Tue Jul 24 22:52:22 2012
New Revision: 1365331

URL: http://svn.apache.org/viewvc?rev=1365331&view=rev
Log:
MAPREDUCE-4400: Fix performance regression for small jobs/workflows

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/Task.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1365331&r1=1365330&r2=1365331&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Tue Jul 24 22:52:22 2012
@@ -242,6 +242,9 @@ Release 1.1.0 - unreleased
 MAPREDUCE-4399. Change the Jetty response buffer size to improve 
 shuffle performance. (Luke Lu via suresh)
 
+MAPREDUCE-4400. Avoid task finish sleep to improve small job/workflow
+latency. (llu)
+
   BUG FIXES
 
 MAPREDUCE-4087. [Gridmix] GenerateDistCacheData job of Gridmix can

Modified: 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/Task.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/Task.java?rev=1365331r1=1365330r2=1365331view=diff
==
--- 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/Task.java 
(original)
+++ 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/Task.java 
Tue Jul 24 22:52:22 2012
@@ -649,14 +649,19 @@ abstract public class Task implements Wr
   // get current flag value and reset it as well
   boolean sendProgress = resetProgressFlag();
   while (!taskDone.get()) {
-synchronized(lock) {
-  done = false;
-}
 try {
   boolean taskFound = true; // whether TT knows about this task
   // sleep for a bit
   try {
-Thread.sleep(PROGRESS_INTERVAL);
+synchronized(lock) {
+  done = false;
+  lock.wait(PROGRESS_INTERVAL);
+  if (taskDone.get()) {
+done = true;
+lock.notify();
+return;
+  }
+}
   } 
   catch (InterruptedException e) {
 if (LOG.isDebugEnabled()) {
@@ -724,6 +729,7 @@ abstract public class Task implements Wr
   // Updating resources specified in ResourceCalculatorPlugin
   if (pingThread != null) {
 synchronized(lock) {
+  lock.notify(); // wake up the wait in the while loop
   while(!done) {
 lock.wait();
   }




svn commit: r1157187 - in /hadoop/common/trunk/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java

2011-08-12 Thread llu
Author: llu
Date: Fri Aug 12 16:57:23 2011
New Revision: 1157187

URL: http://svn.apache.org/viewvc?rev=1157187&view=rev
Log:
HADOOP-7529. Fix lock cycles in metrics system. (llu)

Modified:
hadoop/common/trunk/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java

Modified: hadoop/common/trunk/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/CHANGES.txt?rev=1157187&r1=1157186&r2=1157187&view=diff
==
--- hadoop/common/trunk/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-common/CHANGES.txt Fri Aug 12 16:57:23 2011
@@ -493,6 +493,8 @@ Trunk (unreleased changes)
 
 HADOOP-6622. Token should not print the password in toString. (eli)
 
+HADOOP-7529. Fix lock cycles in metrics system. (llu)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java?rev=1157187&r1=1157186&r2=1157187&view=diff
==
--- 
hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
 Fri Aug 12 16:57:23 2011
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2.lib;
 
+import java.util.concurrent.atomic.AtomicReference;
 import javax.management.ObjectName;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -34,7 +35,8 @@ import org.apache.hadoop.metrics2.impl.M
 public enum DefaultMetricsSystem {
   INSTANCE; // the singleton
 
-  private MetricsSystem impl = new MetricsSystemImpl();
+  private AtomicReferenceMetricsSystem impl =
+  new AtomicReferenceMetricsSystem(new MetricsSystemImpl());
   volatile boolean miniClusterMode = false;
   final UniqueNames mBeanNames = new UniqueNames();
   final UniqueNames sourceNames = new UniqueNames();
@@ -48,8 +50,8 @@ public enum DefaultMetricsSystem {
 return INSTANCE.init(prefix);
   }
 
-  synchronized MetricsSystem init(String prefix) {
-return impl.init(prefix);
+  MetricsSystem init(String prefix) {
+return impl.get().init(prefix);
   }
 
   /**
@@ -66,8 +68,9 @@ public enum DefaultMetricsSystem {
 INSTANCE.shutdownInstance();
   }
 
-  synchronized void shutdownInstance() {
-if (impl.shutdown()) {
+  void shutdownInstance() {
+boolean last = impl.get().shutdown();
+if (last) synchronized(this) {
   mBeanNames.map.clear();
   sourceNames.map.clear();
 }
@@ -78,13 +81,11 @@ public enum DefaultMetricsSystem {
 return INSTANCE.setImpl(ms);
   }
 
-  synchronized MetricsSystem setImpl(MetricsSystem ms) {
-MetricsSystem old = impl;
-impl = ms;
-return old;
+  MetricsSystem setImpl(MetricsSystem ms) {
+return impl.getAndSet(ms);
   }
 
-  synchronized MetricsSystem getImpl() { return impl; }
+  MetricsSystem getImpl() { return impl.get(); }
 
   @InterfaceAudience.Private
   public static void setMiniClusterMode(boolean choice) {




svn commit: r1153586 - in /hadoop/common/trunk/hadoop-common: CHANGES.txt pom.xml

2011-08-03 Thread llu
Author: llu
Date: Wed Aug  3 17:13:24 2011
New Revision: 1153586

URL: http://svn.apache.org/viewvc?rev=1153586&view=rev
Log:
HADOOP-7502. Make generated sources IDE friendly.  (Alejandro Abdelnur via llu)

Modified:
hadoop/common/trunk/hadoop-common/CHANGES.txt
hadoop/common/trunk/hadoop-common/pom.xml

Modified: hadoop/common/trunk/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/CHANGES.txt?rev=1153586&r1=1153585&r2=1153586&view=diff
==
--- hadoop/common/trunk/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-common/CHANGES.txt Wed Aug  3 17:13:24 2011
@@ -303,6 +303,9 @@ Trunk (unreleased changes)
 HADOOP-6671. Use maven for hadoop common builds. (Alejandro Abdelnur
 via tomwhite)
 
+HADOOP-7502. Make generated sources IDE friendly.
+(Alejandro Abdelnur via llu)
+
   OPTIMIZATIONS
   
 HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole

Modified: hadoop/common/trunk/hadoop-common/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/pom.xml?rev=1153586&r1=1153585&r2=1153586&view=diff
==
--- hadoop/common/trunk/hadoop-common/pom.xml (original)
+++ hadoop/common/trunk/hadoop-common/pom.xml Wed Aug  3 17:13:24 2011
@@ -322,10 +322,10 @@
 /goals
 configuration
   target
-mkdir 
dir=${project.build.directory}/generated-src/main/java/
+mkdir 
dir=${project.build.directory}/generated-sources/java/
 exec executable=sh
   arg
-  line=${basedir}/dev-support/saveVersion.sh 
${project.version} ${project.build.directory}/generated-src/main/java/
+  line=${basedir}/dev-support/saveVersion.sh 
${project.version} ${project.build.directory}/generated-sources/java/
 /exec
   /target
 /configuration
@@ -339,19 +339,19 @@
 configuration
   target
 
-mkdir 
dir=${project.build.directory}/generated-src/test/java/
+mkdir 
dir=${project.build.directory}/generated-test-sources/java/
 
 taskdef name=recordcc 
classname=org.apache.hadoop.record.compiler.ant.RccTask
   classpath refid=maven.compile.classpath/
 /taskdef
-recordcc 
destdir=${project.build.directory}/generated-src/test/java
+recordcc 
destdir=${project.build.directory}/generated-test-sources/java
   fileset dir=${basedir}/src/test/ddl includes=**/*.jr/
 /recordcc
 
 taskdef name=schema 
classname=org.apache.avro.specific.SchemaTask
   classpath refid=maven.test.classpath/
 /taskdef
-schema 
destdir=${project.build.directory}/generated-src/test/java
+schema 
destdir=${project.build.directory}/generated-test-sources/java
   fileset dir=${basedir}/src/test
 include name=**/*.avsc/
   /fileset
@@ -360,7 +360,7 @@
 taskdef name=schema 
classname=org.apache.avro.specific.ProtocolTask
   classpath refid=maven.test.classpath/
 /taskdef
-schema 
destdir=${project.build.directory}/generated-src/test/java
+schema 
destdir=${project.build.directory}/generated-test-sources/java
   fileset dir=${basedir}/src/test
 include name=**/*.avpr/
   /fileset
@@ -403,7 +403,7 @@
 /goals
 configuration
   sources
-
source${project.build.directory}/generated-src/main/java/source
+
source${project.build.directory}/generated-sources/java/source
   /sources
 /configuration
   /execution
@@ -415,7 +415,7 @@
 /goals
 configuration
   sources
-
source${project.build.directory}/generated-src/test/java/source
+
source${project.build.directory}/generated-test-sources/java/source
   /sources
 /configuration
   /execution




svn commit: r1151287 - in /hadoop/common/branches/branch-0.20-security: CHANGES.txt src/c++/task-controller/impl/task-controller.c

2011-07-26 Thread llu
Author: llu
Date: Tue Jul 26 23:15:32 2011
New Revision: 1151287

URL: http://svn.apache.org/viewvc?rev=1151287&view=rev
Log:
MAPREDUCE-2651. Fix race condition in Linux task controller for job log 
directory creation. (Bharath Mundlapudi via llu)

Modified:
hadoop/common/branches/branch-0.20-security/CHANGES.txt

hadoop/common/branches/branch-0.20-security/src/c++/task-controller/impl/task-controller.c

Modified: hadoop/common/branches/branch-0.20-security/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/CHANGES.txt?rev=1151287&r1=1151286&r2=1151287&view=diff
==
--- hadoop/common/branches/branch-0.20-security/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security/CHANGES.txt Tue Jul 26 23:15:32 
2011
@@ -6,6 +6,9 @@ Release 0.20.205.0 - unreleased
 
   BUG FIXES
 
+MAPREDUCE-2651. Fix race condition in Linux task controller for
+job log directory creation. (Bharath Mundlapudi via llu)
+
 HADOOP-6833. IPC leaks call parameters when exceptions thrown.
 (Todd Lipcon via Eli Collins)
   

Modified: 
hadoop/common/branches/branch-0.20-security/src/c++/task-controller/impl/task-controller.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/c%2B%2B/task-controller/impl/task-controller.c?rev=1151287&r1=1151286&r2=1151287&view=diff
==
--- 
hadoop/common/branches/branch-0.20-security/src/c++/task-controller/impl/task-controller.c
 (original)
+++ 
hadoop/common/branches/branch-0.20-security/src/c++/task-controller/impl/task-controller.c
 Tue Jul 26 23:15:32 2011
@@ -620,9 +620,16 @@ int create_directory_for_user(const char
   uid_t user = geteuid();
   gid_t group = getegid();
   int ret = 0;
-  ret = change_effective_user(tt_uid, tt_gid);
+  uid_t root = 0;
+
+  //This check is particularly required for c-based unit tests since 
+  //tests run as a regular user.
+  if (getuid() == root) {
+ret = change_effective_user(root, tt_gid);
+  }
+
   if (ret == 0) {
-if (mkdir(path, permissions) == 0) {
+if (mkdir(path, permissions) == 0 || errno == EEXIST) {
   // need to reassert the group sticky bit
   if (chmod(path, permissions) != 0) {
 fprintf(LOGFILE, Can't chmod %s to add the sticky bit - %s\n,
@@ -631,21 +638,6 @@ int create_directory_for_user(const char
   } else if (change_owner(path, user, tt_gid) != 0) {
 ret = -1;
   }
-} else if (errno == EEXIST) {
-  struct stat file_stat;
-  if (stat(path, file_stat) != 0) {
-fprintf(LOGFILE, Can't stat directory %s - %s\n, path, 
-strerror(errno));
-ret = -1;
-  } else {
-if (file_stat.st_uid != user ||
-file_stat.st_gid != tt_gid) {
-  fprintf(LOGFILE, Directory %s owned by wrong user or group. 
-  Expected %d:%d and found %d:%d.\n,
-  path, user, tt_gid, file_stat.st_uid, file_stat.st_gid);
-  ret = -1;
-}
-  }
 } else {
   fprintf(LOGFILE, Failed to create directory %s - %s\n, path,
   strerror(errno));




svn commit: r1148525 - in /hadoop/common/branches/branch-0.20-security: ./ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/ipc/ src/mapred/ src/test/org/apache/hadoop/fs/ src/tools/org/apach

2011-07-19 Thread llu
Author: llu
Date: Tue Jul 19 20:58:34 2011
New Revision: 1148525

URL: http://svn.apache.org/viewvc?rev=1148525&view=rev
Log:
Merge HADOOP-6833 (r990003) into branch-0.20-security

Modified:
hadoop/common/branches/branch-0.20-security/   (props changed)
hadoop/common/branches/branch-0.20-security/CHANGES.txt   (contents, props 
changed)

hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/fs/HarFileSystem.java
   (props changed)

hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/ipc/Client.java
hadoop/common/branches/branch-0.20-security/src/mapred/   (props changed)

hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
   (props changed)

hadoop/common/branches/branch-0.20-security/src/tools/org/apache/hadoop/tools/HadoopArchives.java
   (props changed)

Propchange: hadoop/common/branches/branch-0.20-security/
--
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Jul 19 20:58:34 2011
@@ -1,4 +1,4 @@
-/hadoop/common/branches/branch-0.20:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,1044225
+/hadoop/common/branches/branch-0.20:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,990003,1044225
 
/hadoop/common/branches/branch-0.20-security-203:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1102071,1128115
 /hadoop/common/branches/branch-0.20-security-204:1128390,1147228,1148069
 /hadoop/core/branches/branch-0.19:713112

Modified: hadoop/common/branches/branch-0.20-security/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/CHANGES.txt?rev=1148525&r1=1148524&r2=1148525&view=diff
==
--- hadoop/common/branches/branch-0.20-security/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security/CHANGES.txt Tue Jul 19 20:58:34 
2011
@@ -5,9 +5,12 @@ Release 0.20.205.0 - unreleased
   NEW FEATURES
 
   BUG FIXES
+
+HADOOP-6833. IPC leaks call parameters when exceptions thrown.
+(Todd Lipcon via Eli Collins)
   
-  HADOOP-7400. Fix HdfsProxyTests fails when the -Dtest.build.dir 
-   and -Dbuild.test is set a dir other than build dir (gkesavan).
+HADOOP-7400. Fix HdfsProxyTests fails when the -Dtest.build.dir 
+and -Dbuild.test is set a dir other than build dir (gkesavan).
 
   IMPROVEMENTS
 

Propchange: hadoop/common/branches/branch-0.20-security/CHANGES.txt
--
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Jul 19 20:58:34 2011
@@ -1,4 +1,4 @@
-/hadoop/common/branches/branch-0.20/CHANGES.txt:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,1044225
+/hadoop/common/branches/branch-0.20/CHANGES.txt:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,990003,1044225
 
/hadoop/common/branches/branch-0.20-security-203/CHANGES.txt:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1102071,1128115
 
/hadoop/common/branches/branch-0.20-security-204/CHANGES.txt:1128390,1147228,1148069
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226

Propchange: 
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/fs/HarFileSystem.java
--
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Jul 19 20:58:34 2011
@@ -1,3 +1,4 @@
+/hadoop/common/branches/branch-0.20/src/core/org/apache/hadoop/fs/HarFileSystem.java:990003
 
/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/fs/HarFileSystem.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1128115
 
/hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/HarFileSystem.java:1128390
 /hadoop/common/trunk/src/core/org/apache/hadoop/fs/HarFileSystem.java:910709

Modified: 
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/ipc/Client.java?rev=1148525&r1=1148524&r2=1148525&view=diff
==
--- 
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/ipc/Client.java
 Tue Jul 19 20:58:34 2011
@@ -780,6 +780,7 @@ public class Client {
 } else if (state == Status.ERROR.state) {
   call.setException(new RemoteException(WritableUtils.readString

svn commit: r1145523 - in /hadoop/common/branches/branch-0.20-security-204: ./ conf/ src/core/org/apache/hadoop/metrics2/sink/ganglia/ src/core/org/apache/hadoop/metrics2/util/ src/test/org/apache/had

2011-07-12 Thread llu
Author: llu
Date: Tue Jul 12 10:22:31 2011
New Revision: 1145523

URL: http://svn.apache.org/viewvc?rev=1145523&view=rev
Log:
HADOOP-7324. Ganglia support for metrics v2. (Priyo Mustafi via llu)

Added:

hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/

hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java

hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaConf.java

hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricVisitor.java

hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java

hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java

hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java

hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/metrics2/sink/

hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/metrics2/sink/ganglia/

hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricsTestHelper.java
Modified:
hadoop/common/branches/branch-0.20-security-204/CHANGES.txt

hadoop/common/branches/branch-0.20-security-204/conf/hadoop-metrics2.properties

hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/util/MetricsCache.java

hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/metrics2/util/TestMetricsCache.java

Modified: hadoop/common/branches/branch-0.20-security-204/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/CHANGES.txt?rev=1145523&r1=1145522&r2=1145523&view=diff
==
--- hadoop/common/branches/branch-0.20-security-204/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security-204/CHANGES.txt Tue Jul 12 
10:22:31 2011
@@ -9,6 +9,8 @@ Release 0.20.204.0 - unreleased
 scripts for easy one node cluster configuration and user creation.
 (Eric Yang via omalley)
 
+HADOOP-7324. Ganglia plugins for metrics v2. (Priyo Mustafi via llu)
+
   BUG FIXES
 
 MAPREDUCE-2495. exit() the TaskTracker when the distributed cache cleanup

Modified: 
hadoop/common/branches/branch-0.20-security-204/conf/hadoop-metrics2.properties
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/conf/hadoop-metrics2.properties?rev=1145523&r1=1145522&r2=1145523&view=diff
==
--- 
hadoop/common/branches/branch-0.20-security-204/conf/hadoop-metrics2.properties 
(original)
+++ 
hadoop/common/branches/branch-0.20-security-204/conf/hadoop-metrics2.properties 
Tue Jul 12 10:22:31 2011
@@ -14,3 +14,33 @@
 #maptask.sink.file.filename=maptask-metrics.out
 
 #reducetask.sink.file.filename=reducetask-metrics.out
+
+
+#
+# Below are for sending metrics to Ganglia
+#
+# for Ganglia 3.0 support
+# *.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30
+#
+# for Ganglia 3.1 support
+# *.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
+
+# *.sink.ganglia.period=10
+
+# default for supportsparse is false
+# *.sink.ganglia.supportsparse=true
+
+#*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
+#*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
+
+#namenode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#datanode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#jobtracker.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#tasktracker.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#maptask.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#reducetask.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649

Added: 
hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java?rev=1145523&view=auto
==
--- 
hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
 (added)
+++ 
hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
 Tue Jul 12 10:22:31 2011
@@ -0,0 +1,287 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor

svn commit: r1145524 - in /hadoop/common/branches/branch-0.20-security/src: core/org/apache/hadoop/metrics2/sink/ganglia/ test/org/apache/hadoop/metrics2/impl/ test/org/apache/hadoop/metrics2/sink/gan

2011-07-12 Thread llu
Author: llu
Date: Tue Jul 12 10:23:03 2011
New Revision: 1145524

URL: http://svn.apache.org/viewvc?rev=1145524&view=rev
Log:
Rename TestGangliaMetricsHelper to GangliaMetricsTestHelper.

Added:

hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricsTestHelper.java
  - copied, changed from r1144086, 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/sink/ganglia/TestGangliaMetricsHelper.java
Removed:

hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/sink/ganglia/TestGangliaMetricsHelper.java
Modified:

hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricVisitor.java

hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java

Modified: 
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricVisitor.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricVisitor.java?rev=1145524&r1=1145523&r2=1145524&view=diff
==
--- 
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricVisitor.java
 (original)
+++ 
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricVisitor.java
 Tue Jul 12 10:23:03 2011
@@ -51,18 +51,14 @@ class GangliaMetricVisitor implements Me
 return slope;
   }
 
-  /* (non-Javadoc)
-   * @see 
org.apache.hadoop.metrics2.MetricsVisitor#gauge(org.apache.hadoop.metrics2.MetricGauge,
 int)
-   */
+  @Override
   public void gauge(MetricGaugeInteger metric, int value) {
 // MetricGaugeInt.class == int32
 type = INT32;
 slope = null; // set to null as cannot figure out from Metric
   }
 
-  /* (non-Javadoc)
-   * @see 
org.apache.hadoop.metrics2.MetricsVisitor#counter(org.apache.hadoop.metrics2.MetricCounter,
 int)
-   */
+  @Override
   public void counter(MetricCounterInteger metric, int value) {
 // MetricCounterInt.class == int32
 type = INT32;
@@ -71,18 +67,14 @@ class GangliaMetricVisitor implements Me
 slope = GangliaSlope.positive;
   }
 
-  /* (non-Javadoc)
-   * @see 
org.apache.hadoop.metrics2.MetricsVisitor#gauge(org.apache.hadoop.metrics2.MetricGauge,
 long)
-   */
+  @Override
   public void gauge(MetricGaugeLong metric, long value) {
 // MetricGaugeLong.class == float
 type = FLOAT;
 slope = null; // set to null as cannot figure out from Metric
   }
 
-  /* (non-Javadoc)
-   * @see 
org.apache.hadoop.metrics2.MetricsVisitor#counter(org.apache.hadoop.metrics2.MetricCounter,
 long)
-   */
+  @Override
   public void counter(MetricCounterLong metric, long value) {
 // MetricCounterLong.class == float
 type = FLOAT;
@@ -91,18 +83,14 @@ class GangliaMetricVisitor implements Me
 slope = GangliaSlope.positive;
   }
 
-  /* (non-Javadoc)
-   * @see 
org.apache.hadoop.metrics2.MetricsVisitor#gauge(org.apache.hadoop.metrics2.MetricGauge,
 float)
-   */
+  @Override
   public void gauge(MetricGaugeFloat metric, float value) {
 // MetricGaugeFloat.class == float
 type = FLOAT;
 slope = null; // set to null as cannot figure out from Metric
   }
 
-  /* (non-Javadoc)
-   * @see 
org.apache.hadoop.metrics2.MetricsVisitor#gauge(org.apache.hadoop.metrics2.MetricGauge,
 double)
-   */
+  @Override
   public void gauge(MetricGaugeDouble metric, double value) {
 // MetricGaugeDouble.class == double
 type = DOUBLE;

Modified: 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java?rev=1145524&r1=1145523&r2=1145524&view=diff
==
--- 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
 (original)
+++ 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
 Tue Jul 12 10:23:03 2011
@@ -36,7 +36,7 @@ import org.apache.hadoop.metrics2.lib.Me
 import org.apache.hadoop.metrics2.sink.ganglia.AbstractGangliaSink;
 import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30;
 import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31;
-import org.apache.hadoop.metrics2.sink.ganglia.TestGangliaMetricsHelper;
+import org.apache.hadoop.metrics2.sink.ganglia.GangliaMetricsTestHelper;
 import org.junit.Test;
 
 public class TestGangliaMetrics {
@@ -61,12 +61,12 @@ public class TestGangliaMetrics {
 AbstractGangliaSink gsink30 = new GangliaSink30();
 gsink30.init(cb.subset(test));
 MockDatagramSocket mockds30 = new

svn commit: r1145525 - in /hadoop/common/trunk/common: ./ conf/ src/java/org/apache/hadoop/metrics2/sink/ganglia/ src/java/org/apache/hadoop/metrics2/util/ src/test/core/org/apache/hadoop/metrics2/imp

2011-07-12 Thread llu
Author: llu
Date: Tue Jul 12 10:23:32 2011
New Revision: 1145525

URL: http://svn.apache.org/viewvc?rev=1145525&view=rev
Log:
HADOOP-7324. Ganglia plugins for metrics v2. (Priyo Mustafi via llu)

Added:
hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/

hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java

hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaConf.java

hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricVisitor.java

hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java

hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java

hadoop/common/trunk/common/src/test/core/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
hadoop/common/trunk/common/src/test/core/org/apache/hadoop/metrics2/sink/

hadoop/common/trunk/common/src/test/core/org/apache/hadoop/metrics2/sink/ganglia/

hadoop/common/trunk/common/src/test/core/org/apache/hadoop/metrics2/sink/ganglia/GangliaMetricsTestHelper.java
Modified:
hadoop/common/trunk/common/CHANGES.txt
hadoop/common/trunk/common/conf/hadoop-metrics2.properties

hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/util/MetricsCache.java

hadoop/common/trunk/common/src/test/core/org/apache/hadoop/metrics2/util/TestMetricsCache.java

Modified: hadoop/common/trunk/common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/common/CHANGES.txt?rev=1145525&r1=1145524&r2=1145525&view=diff
==
--- hadoop/common/trunk/common/CHANGES.txt (original)
+++ hadoop/common/trunk/common/CHANGES.txt Tue Jul 12 10:23:32 2011
@@ -12,6 +12,8 @@ Trunk (unreleased changes)
 
   NEW FEATURES
 
+HADOOP-7324. Ganglia plugins for metrics v2. (Priyo Mustafi via llu)
+
 HADOOP-7342. Add an utility API in FileUtil for JDK File.list
 avoid NPEs on File.list() (Bharath Mundlapudi via mattf)
 

Modified: hadoop/common/trunk/common/conf/hadoop-metrics2.properties
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/common/conf/hadoop-metrics2.properties?rev=1145525&r1=1145524&r2=1145525&view=diff
==
--- hadoop/common/trunk/common/conf/hadoop-metrics2.properties (original)
+++ hadoop/common/trunk/common/conf/hadoop-metrics2.properties Tue Jul 12 
10:23:32 2011
@@ -25,3 +25,33 @@
 
 #reducetask.sink.file.filename=reducetask-metrics.out
 
+
+#
+# Below are for sending metrics to Ganglia
+#
+# for Ganglia 3.0 support
+# *.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30
+#
+# for Ganglia 3.1 support
+# *.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
+
+# *.sink.ganglia.period=10
+
+# default for supportsparse is false
+# *.sink.ganglia.supportsparse=true
+
+#*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
+#*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
+
+#namenode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#datanode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#jobtracker.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#tasktracker.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#maptask.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+
+#reducetask.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
+

Added: 
hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java?rev=1145525&view=auto
==
--- 
hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
 (added)
+++ 
hadoop/common/trunk/common/src/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
 Tue Jul 12 10:23:32 2011
@@ -0,0 +1,288 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See

svn commit: r1138278 - in /hadoop/common/branches/MR-279/common: CHANGES.txt src/saveVersion.sh

2011-06-21 Thread llu
Author: llu
Date: Wed Jun 22 02:02:24 2011
New Revision: 1138278

URL: http://svn.apache.org/viewvc?rev=1138278&view=rev
Log:
HADOOP-7390. VersionInfo not generated properly in git after unsplit. (todd via 
atm)

Modified:
hadoop/common/branches/MR-279/common/CHANGES.txt
hadoop/common/branches/MR-279/common/src/saveVersion.sh

Modified: hadoop/common/branches/MR-279/common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/MR-279/common/CHANGES.txt?rev=1138278&r1=1138277&r2=1138278&view=diff
==
--- hadoop/common/branches/MR-279/common/CHANGES.txt (original)
+++ hadoop/common/branches/MR-279/common/CHANGES.txt Wed Jun 22 02:02:24 2011
@@ -142,6 +142,9 @@ Trunk (unreleased changes)
 HADOOP-7082. Configuration.writeXML should not hold lock while outputting.
 (todd)
 
+HADOOP-7390. VersionInfo not generated properly in git after unsplit. (todd
+via atm)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/MR-279/common/src/saveVersion.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/MR-279/common/src/saveVersion.sh?rev=1138278&r1=1138277&r2=1138278&view=diff
==
--- hadoop/common/branches/MR-279/common/src/saveVersion.sh (original)
+++ hadoop/common/branches/MR-279/common/src/saveVersion.sh Wed Jun 22 02:02:24 
2011
@@ -26,7 +26,7 @@ build_dir=$2
 user=`whoami`
 date=`date`
 cwd=`pwd`
-if [ -d .git ]; then
+if git rev-parse HEAD 2/dev/null  /dev/null ; then
   revision=`git log -1 --pretty=format:%H`
   hostname=`hostname`
   branch=`git branch | sed -n -e 's/^* //p'`




svn commit: r1137058 [2/2] - in /hadoop/common/site: common/ common/author/src/documentation/content/xdocs/ common/publish/ hdfs/ hdfs/author/src/documentation/content/xdocs/ hdfs/publish/ hdfs/publis

2011-06-17 Thread llu
Modified: hadoop/common/site/mapreduce/publish/version_control.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/mapreduce/publish/version_control.pdf?rev=1137058&r1=1137057&r2=1137058&view=diff
==
--- hadoop/common/site/mapreduce/publish/version_control.pdf (original)
+++ hadoop/common/site/mapreduce/publish/version_control.pdf Fri Jun 17 
22:36:09 2011
@@ -69,10 +69,10 @@ endobj
 
 endobj
 16 0 obj
- /Length 1605 /Filter [ /ASCII85Decode /FlateDecode ]
+ /Length 1621 /Filter [ /ASCII85Decode /FlateDecode ]
  
 stream
-GatU4h/h:0:`lHcsni[#j]C6\[NhYdWDY*?D-]0?1(/!)LD)Z@U,IJ*RUg)0Rle!FfPmDj-pg0NlVeEc!]Li=?,dB.WHnTurXgsm5UJJ.KF'J=TmtAo5?DG/ECGnD3`d$C3P^;6_hu@Ro/Vjgam@#or=L?jcT*9Do51i#!c`kB-6=,#*7R%233huoMW:DlN.-L)`S8,`\k:^lSTVe38Vip;(n]qk3ohItfa])hHOJM=E\aSg(/LSNjAQAtj*pujc70=OL*!-R'NR/#P`dGT86)J(DG%b!S2M2Y[*R7TE6/SK@t6C/FPaL^e`ok]]f.CJ2f5*@N7k3eUS+\?$,$hSdg0sI6=]Com+G;Hn:t7]gg!jOu`VqF7gT6!OP3kEkZX^jK+==1!pj`d_1/VcRSY]E=Z$/j%EsZHUD07-Z*^J4@KS'=G`WfNVC'C0NC/)3S(WQh(cgVOeU(O1$qAt33ltGlauW[m1@LK3JDK5#`DAKB0VT%Wd!.n@A*b/;V,%_f:5HAPiIpII70+1Z@/_/4,GBf9BU?KAD3=D#-u60mWEStmDCE\!?M*#hCUjI?.^1_kh!WjXa_%?k[;pO9PLU5sF]R_!)ZVus=JZ'SKD0RffnPqu5]f#\BAhGHV0UJu]?B,+6m2I@@4u1AT_bnA%KO;nVcfRPq899lMcrL6,;KpLrpaEZ#n3@rVeR0959?::KETbSncuiD2?)B$SX165Y#cjSt-6Dr#iIJ5iFSftV^o/9F0M6VWiGO\.s)TR`N[IZ1[XJ%5`q$m9@CbOYr,OgWD?j]OV#m):00?$GF=/31C0U@g4CA-9mq:AhbAqW5DCe+3LO\4IISq7CbMP=)r0#Q(`]--/26Yb?e\[WRYKk*tU^2(*I`%/T^GZB$nLD.q*+C)N.LtSEEgc4bH)3IQHTB(jBY/olXW6@_R6ahE3')`pP1t;\u.Yc@$AF
 
hdff%0Y^sJZE!TTC*((.]?7ENl^)(l02gZcU,,[G#Xt'ibrRRm+,ae+jLS84*a=X$1s=-UP/H:GF[rNa[61D1p+r:0o]Y`SHB'BJ.29qTfVg[,tI8IN!']:2SihI@^8NtTG_D=2ED)V2_Mc]rd@q`C#l6Z!7kk'JAQ2L,X0Xn7VGKR)0DT7r%:;:?6`.[CU$TaKK)ON^@/1KYnqf6hG*cn1nNfmN+^M^W7C+IprRNLJ!9E05RPTT@UNAr+bOnN-trjU+P[q+dV*.'2=IM*``!:q6XTC`LOjp,4M:##F3%#eqG/.L]fm9gtbVQfIo]iG-06Kk[d+I*Qc?C-jb\?35]HRjb?[!dk6-`B0I?GcYu%3\I^1GjWcqkt#!S)FTU0bPZW;j5T@iFmahn%_n(*fa[Tqnu9Xk1lGR4l^.)TCfjVBKq,)?c0aeM-ffWTIF*dWIKP2,=s3gBm;E;R$Mq[Y=d52Hd_j5nkU%u6s98J$O\C)]Z$248$[QQ,0p]jNHG.un`Q-JB::C/lMd5I?mKpTRF78a^ek\o?Xi5V)YrPc:=Vt*Vp*9Tbko'mS;g2kP`Go\MqrUZ,TpI5[2P4%nN6CA,~
+Gatm8TPe1'Ya/hFH!80dFro7S*ejg;j1c$8ZHAb#6?T%)-TGa5l'`lU=R1rdh*07M6q[IRBjV?srL4')NWV+clih(V,:J'aJs$+U+_R1H.KHT6CF1TlV\UrO]7KA_*j7S=tG[n5Je%f_Yh2bM0MqBk5^%R\18oB0P4@q68lp=5^Q;.T-[!(/H8.EUm;Fs_cJ_8Rdb'cZ47-EFCq,hFR,b@^*,bspPU2m/1\Z7T]=M+eE@JNDl2]61/aBSh_'8QBRjfM(5Od//*7*m'bT/m6(Mb3f1aRM;eNuo4'7IBdQH_LZ9CV;q'Dj*_$gNen(+ppC@fPoY8([f2Q_2g5[B2,HkDg4YgX7!l9:_u72UAnDd=1^1/d'aUoW_^RA2ZH?VjXFdceae0Wj9L*V%9n!50'$O%\hNc#=JZ+2,#:2?g,rd1.Sb:jCVjX=;aqeCa;K1l^#Ed80[uuHBs(WBhgW?+LYpu6R[j\PaPLrI\tddDt;e@NKORR+9pj7a!GVf73r5!HjX]8KT!#9\#jXoZ43;n,*sliT'Hjj/a'0KRX#V[7ob(1flc#DW0!^XK+lZ#`s]L%M:Bg3I_JZcA'O5)W#_,++cl`@H5.oL6CeWY9lRUi33WM:kfj0pI9-pE/alr-'`Ob6g,!Vk6!im^OZnL.?[[)[LWr#`b+%FrITI\r0n!+0I7,@-C:qUUF%qj09F+IEA?5#0Yr_t+\QAij6pUKkr]hhJLFZ+a+G0g-G.6,9QUNUnQ#`#;d:I/ElfP0/V,0*2`BCV#k1l8O3X^[jDNhP.K=r'X(7msaa^]^BnB?7QNdht^Aa/th;(Vb2p[`N^AMq'(2Ud,cSW)/\*`(8aXBH\AjZ(Xj^G)_TWP=Nf;_8RZ6UPZ/R-2M:jF]_`:i0mm5ON8WE_Y'RoY/64d$ZTCR%5*063.E'Z1WgD)*cYMbId0$ba]4Q\dicQmD3;R91MkD/lq
 
MfLp$rLY)nph?b$dML4k`%EbFB!,sJ[0SQ3/2(*I(_lsGa%YmFm59[5JD\WgTWdnh@1O3ER.8YRoqY)!uC;dSS:FT4Mf+j%Z,U1jlV_Di@G-C.M:2f*$3[$+$k\Go6kDn$aOQ`Ds_ma?Ru9`__ko950Z5%YM_FhU/:!k1%u$:e0f$`%NK^U=Uh'PLk^oIY2[MbUZHmTsoE!a`9XD23FF#*?TLuI)?G0J;4?g;Rio%^mCqM^=^\]W!s1R;`rLbdj1OZ]-1shu9rg:6qUUN?A1.PIIF845.3g5/0^)E##Pcm1HIcb$CA#aWO[UcT[@O;suXm,)QFB05ADs7JPH(c!jOJXn_#^dLYU8\FnJeVc^4PGB0]C?5aPJmqV*-SD1gi%5e%ZZ/P+_WKHI37Ktjh-T3.%](ep7EQEKlNfqd)obeI_Fo)pS4%uPE`,hn[:$cDXZZ*-F03#X)tQ6K7EK26L.6cMq4Rllto_Voh_^69-E,5L2DpocYgFES?+l]/Si4=Bj/'tb0;iSmr%)K,HsCl-tprf(qt;AR.7Wg/'Bj?tSqH-2[Q5C.fD%F/O^idkP1a0]/IDlD'=T`M$pE)r@5Y_~
 endstream
 endobj
 17 0 obj
@@ -156,10 +156,10 @@ endobj
 24 0 obj
  /Type /Annot
 /Subtype /Link
-/Rect [ 90.0 524.532 348.96 512.532 ]
+/Rect [ 90.0 524.532 422.964 512.532 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A  /URI (http://svn.apache.org/viewcvs.cgi/hadoop/mapreduce/)
+/A  /URI (http://svn.apache.org/viewcvs.cgi/hadoop/common/trunk/mapreduce/)
 /S /URI 
 /H /I
 
@@ -167,10 +167,10 @@ endobj
 25 0 obj
  /Type /Annot
 /Subtype /Link
-/Rect [ 279.648 472.198 524.94 460.198 ]
+/Rect [ 90.0 445.798 409.296 433.798 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A  /URI (http://svn.apache.org/repos/asf/hadoop/mapreduce/)
+/A  /URI (http://svn.apache.org/repos/asf/hadoop/common/trunk/mapreduce/)
 /S /URI 
 /H /I
 
@@ -178,7 +178,7 @@ endobj
 26 0 obj
  /Type /Annot
 /Subtype /Link
-/Rect [ 303.288 458.998 323.94 446.998 ]
+/Rect [ 226.632 432.598 247.284 420.598 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A  /URI (http://www.apache.org/dev/version-control.html#anon-svn)
@@ -189,10 +189,10 @@ endobj
 27 0 obj
  /Type /Annot
 /Subtype /Link
-/Rect [ 250.656 406.664 500.616 394.664 ]
+/Rect [ 90.0 367.064 413.964 355.064 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A  /URI (https://svn.apache.org/repos/asf/hadoop/mapreduce/)
+/A  /URI