svn commit: r1550486 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/RpcConstants.java src/main/java/org/apache/hadoop/ipc/Server.java

2013-12-12 Thread sradia
Author: sradia
Date: Thu Dec 12 18:56:45 2013
New Revision: 1550486

URL: http://svn.apache.org/r1550486
Log:
HADOOP-10044 Improve the javadoc of rpc code (sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1550486&r1=1550485&r2=1550486&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Dec 
12 18:56:45 2013
@@ -280,6 +280,8 @@ Trunk (Unreleased)
 HDFS-5471. CacheAdmin -listPools fails when user lacks permissions to view
 all pools (Andrew Wang via Colin Patrick McCabe)
 
+HADOOP-10044 Improve the javadoc of rpc code (sanjay Radia)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java?rev=1550486&r1=1550485&r2=1550486&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
 Thu Dec 12 18:56:45 2013
@@ -37,10 +37,24 @@ public class RpcConstants {
   
   public static final int INVALID_RETRY_COUNT = -1;
   
+ /**
+  * The Rpc-connection header is as follows 
+  * +--+
+  * |  "hrpc" 4 bytes  |  
+  * +--+
+  * |  Version (1 byte)|
+  * +--+
+  * |  Service Class (1 byte)  |
+  * +--+
+  * |  AuthProtocol (1 byte)   |  
+  * +--+
+  */
+  
   /**
* The first four bytes of Hadoop RPC connections
*/
   public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes());
+  public static final int HEADER_LEN_AFTER_HRPC_PART = 3; // 3 bytes that 
follow
   
   // 1 : Introduce ping and server does not throw away RPCs
   // 3 : Introduce the protocol into the RPC connection header

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1550486&r1=1550485&r2=1550486&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Thu Dec 12 18:56:45 2013
@@ -1105,6 +1105,9 @@ public abstract class Server {
   this.channel = channel;
   this.lastContact = lastContact;
   this.data = null;
+  
+  // the buffer is initialized to read the "hrpc" and after that to read
+  // the length of the Rpc-packet (i.e. 4 bytes)
   this.dataLengthBuffer = ByteBuffer.allocate(4);
   this.unwrappedData = null;
   this.unwrappedDataLengthBuffer = ByteBuffer.allocate(4);
@@ -1200,7 +1203,16 @@ public abstract class Server {
   }
 }
 
-private Throwable getCauseForInvalidToken(IOException e) {
+/**
+ * Some exceptions ({@link RetriableException} and {@link 
StandbyException})
+ * that are wrapped as a cause of parameter e are unwrapped so that they 
can
+ * be sent as the true cause to the client side. In case of
+ * {@link InvalidToken} we go one level deeper to get the true cause.
+ * 
+ * @param e the exception that may have a cause we want to unwrap.
+ * @return the true cause for some exceptions.
+ */
+private Throwable getTrueCause(IOException e) {
   Throwable cause = e;
   while (cause != null) {
 if (cause instanceof RetriableException) {
@@ -1223,6 +1235,18 @@ public abstract class Server {
   return e;
 }
 
+/**
+ * Process saslMessage and send saslResponse back
+ * @param saslMessage received SASL message
+ * @thr

svn commit: r1495573 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/proto/ src/site/apt/

2013-06-21 Thread sradia
Author: sradia
Date: Fri Jun 21 20:06:24 2013
New Revision: 1495573

URL: http://svn.apache.org/r1495573
Log:
Merged HADOOP-9619 svn merge -c 1495564

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/Security.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/site/apt/Compatibility.apt.vm

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1495573&r1=1495572&r2=1495573&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Fri Jun 21 20:06:24 2013
@@ -140,6 +140,8 @@ Release 2.1.0-beta - UNRELEASED
 
 HADOOP-8608. Add Configuration API for parsing time durations. (cdouglas)
 
+HADOOP-9619 Mark stability of .proto files (sanjay Radia)
+
   OPTIMIZATIONS
 
 HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto?rev=1495573&r1=1495572&r2=1495573&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
 Fri Jun 21 20:06:24 2013
@@ -16,6 +16,12 @@
  * limitations under the License.
  */
 
+/**
+ * These .proto interfaces are private and stable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *stable* .proto interface.
+ */
+
 option java_package = "org.apache.hadoop.ha.proto";
 option java_outer_classname = "HAServiceProtocolProtos";
 option java_generic_services = true;

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto?rev=1495573&r1=1495572&r2=1495573&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
 Fri Jun 21 20:06:24 2013
@@ -15,6 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * These .proto interfaces are private and stable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *stable* .proto interface.
+ */
+
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "IpcConnectionContextProtos";
 option java_generate_equals_and_hash = true;

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto?rev=1495573&r1=1495572&r2=1495573&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
 Fri Jun 21 20:06:24 2013
@@ -17,6 +17,12 @@
  */
 
 /**
+ * These .proto interfaces are private and stable.
+ * Please see http://wiki.apache.o

svn commit: r1495564 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/proto/ src/site/apt/

2013-06-21 Thread sradia
Author: sradia
Date: Fri Jun 21 19:53:21 2013
New Revision: 1495564

URL: http://svn.apache.org/r1495564
Log:
HADOOP-9619 Mark stability of .proto files (sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/Security.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/Compatibility.apt.vm

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1495564&r1=1495563&r2=1495564&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Jun 
21 19:53:21 2013
@@ -409,6 +409,8 @@ Release 2.1.0-beta - UNRELEASED
 
 HADOOP-8608. Add Configuration API for parsing time durations. (cdouglas)
 
+HADOOP-9619 Mark stability of .proto files (sanjay Radia)
+
   OPTIMIZATIONS
 
 HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto?rev=1495564&r1=1495563&r2=1495564&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
 Fri Jun 21 19:53:21 2013
@@ -16,6 +16,12 @@
  * limitations under the License.
  */
 
+/**
+ * These .proto interfaces are private and stable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *stable* .proto interface.
+ */
+
 option java_package = "org.apache.hadoop.ha.proto";
 option java_outer_classname = "HAServiceProtocolProtos";
 option java_generic_services = true;

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto?rev=1495564&r1=1495563&r2=1495564&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
 Fri Jun 21 19:53:21 2013
@@ -15,6 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * These .proto interfaces are private and stable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *stable* .proto interface.
+ */
+
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "IpcConnectionContextProtos";
 option java_generate_equals_and_hash = true;

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto?rev=1495564&r1=1495563&r2=1495564&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
 Fri Jun 21 19:53:21 2013
@@ -17,6 +17,12 @@
  */
 
 /**
+ * These .proto interfaces are private and stable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *stable* .proto interface.
+ */
+
+/**
  * These are the messages used by Hadoop RPC for the Rpc Engine Protocol Buffer
  * to marshal the request and response in the RPC layer.
  * The messages are sent in addition to the normal RPC

svn commit: r1488071 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/site/apt/InterfaceClassification.apt.vm

2013-05-30 Thread sradia
Author: sradia
Date: Fri May 31 00:54:05 2013
New Revision: 1488071

URL: http://svn.apache.org/r1488071
Log:
Merged HADOOP-7391  svn merge -c 1488069

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/site/apt/InterfaceClassification.apt.vm
  - copied unchanged from r1488069, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/InterfaceClassification.apt.vm
Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1488071&r1=1488070&r2=1488071&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Fri May 31 00:54:05 2013
@@ -87,6 +87,8 @@ Release 2.0.5-beta - UNRELEASED
 helping YARN ResourceManager to reuse code for RM restart. (Jian He via
 vinodkv)
 
+HADOOP-7391 Document Interface Classification from HADOOP-5073 (sanjay 
Radia)
+
   OPTIMIZATIONS
 
 HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs




svn commit: r1488069 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/site/apt/InterfaceClassification.apt.vm

2013-05-30 Thread sradia
Author: sradia
Date: Fri May 31 00:39:18 2013
New Revision: 1488069

URL: http://svn.apache.org/r1488069
Log:
HADOOP-7391 Document Interface Classification from HADOOP-5073 (sanjay Radia)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/InterfaceClassification.apt.vm
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1488069&r1=1488068&r2=1488069&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri May 
31 00:39:18 2013
@@ -453,6 +453,8 @@ Release 2.0.5-beta - UNRELEASED
 helping YARN ResourceManager to reuse code for RM restart. (Jian He via
 vinodkv)
 
+HADOOP-7391 Document Interface Classification from HADOOP-5073 (sanjay 
Radia)
+
   OPTIMIZATIONS
 
 HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/InterfaceClassification.apt.vm
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/InterfaceClassification.apt.vm?rev=1488069&view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/InterfaceClassification.apt.vm
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/InterfaceClassification.apt.vm
 Fri May 31 00:39:18 2013
@@ -0,0 +1,241 @@
+~~ Licensed under the Apache License, Version 2.0 (the "License");
+~~ you may not use this file except in compliance with the License.
+~~ You may obtain a copy of the License at
+~~
+~~   http://www.apache.org/licenses/LICENSE-2.0
+~~
+~~ Unless required by applicable law or agreed to in writing, software
+~~ distributed under the License is distributed on an "AS IS" BASIS,
+~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~~ See the License for the specific language governing permissions and
+~~ limitations under the License. See accompanying LICENSE file.
+
+  ---
+  Hadoop Interface Taxonomy: Audience and Stability Classification
+  ---
+  ---
+  ${maven.build.timestamp}
+
+Hadoop Interface Taxonomy: Audience and Stability Classification
+
+  \[ {{{./index.html}Go Back}} \]
+
+%{toc|section=1|fromDepth=0}
+
+* Motivation
+
+  The interface taxonomy classification provided here is for guidance to
+  developers and users of interfaces. The classification guides a developer
+  to declare the targeted audience or users of an interface and also its
+  stability.
+
+  * Benefits to the user of an interface: Knows which interfaces to use or not
+use and their stability.
+
+  * Benefits to the developer: to prevent accidental changes of interfaces and
+hence accidental impact on users or other components or system. This is
+particularly useful in large systems with many developers who may not all
+have a shared state/history of the project.
+
+* Interface Classification
+
+  Hadoop adopts the following interface classification,
+  this classification was derived from the
+  
{{{http://www.opensolaris.org/os/community/arc/policies/interface-taxonomy/#Advice}OpenSolaris
 taxonomy}}
+  and, to some extent, from taxonomy used inside Yahoo. Interfaces have two 
main
+  attributes: Audience and Stability
+
+** Audience
+
+   Audience denotes the potential consumers of the interface. While many
+   interfaces are internal/private to the implementation,
+   others are public/external interfaces meant for wider consumption by
+   applications and/or clients. For example, in posix, libc is an external or
+   public interface, while large parts of the kernel are internal or private
+   interfaces. Also, some interfaces are targeted towards other specific
+   subsystems.
+
+   Identifying the audience of an interface helps define the impact of
+   breaking it. For instance, it might be okay to break the compatibility of
+   an interface whose audience is a small number of specific subsystems. On
+   the other hand, it is probably not okay to break a protocol interface
+   that millions of Internet users depend on.
+
+   Hadoop uses the following kinds of audience in order of
+   increasing/wider visibility:
+
+   * Private:
+
+ * The interface is for internal use within the project (such as HDFS or
+ MapReduce) and should not be used by applications or by other projects. It
+ is subject to change at anytime without notice. Most interfaces of a
+ project are Private (also referred to as project-private).
+
+   * Limited-Private:
+
+ * The int

svn commit: r1483146 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/ipc/ src/main/proto/ src/test/core/ src

2013-05-15 Thread sradia
Author: sradia
Date: Thu May 16 01:38:02 2013
New Revision: 1483146

URL: http://svn.apache.org/r1483146
Log:
Merged HADOOP-9425

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchMethodException.java
  - copied unchanged from r1479143, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchMethodException.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchProtocolException.java
  - copied unchanged from r1479143, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchProtocolException.java
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
--
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1479143

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1483146&r1=1483145&r2=1483146&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu May 16 01:38:02 2013
@@ -12,6 +12,8 @@ Release 2.0.5-beta - UNRELEASED
 
 HADOOP-9380 Add totalLength to rpc response  (sanjay Radia)
 
+HADOOP-9425 Add error codes to rpc-response (sanjay Radia)
+
   NEW FEATURES
 
 HADOOP-9194. RPC support for QoS. (Junping Du via llu)

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1479143

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1479143

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1479143

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1483146&r1=1483145&r2=1483146&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/ja

svn commit: r1483145 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/ipc/ src/test/core/

2013-05-15 Thread sradia
Author: sradia
Date: Thu May 16 01:35:16 2013
New Revision: 1483145

URL: http://svn.apache.org/r1483145
Log:
Merged HADOOP-9380

Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
--
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1459392

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1483145&r1=1483144&r2=1483145&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu May 16 01:35:16 2013
@@ -10,6 +10,8 @@ Release 2.0.5-beta - UNRELEASED
 HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
 it separately (sanjay Radia)
 
+HADOOP-9380 Add totalLength to rpc response  (sanjay Radia)
+
   NEW FEATURES
 
 HADOOP-9194. RPC support for QoS. (Junping Du via llu)

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1459392

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1459392

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1459392

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1483145&r1=1483144&r2=1483145&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Thu May 16 01:35:16 2013
@@ -83,6 +83,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.Time;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.google.protobuf.CodedOutputStream;
 
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
@@ -240,7 +241,7 @@ public class Client {
   callComplete();
 }
 
-public synchronized Writable getRpcResult() {
+public synchronized Writable getRpcResponse() {
   return rpcResponse;
 }
   }
@@ -941,11 +942,14 @@ public class Client {
   touch();
   
   try {
+int totalLen = in.readInt();
 RpcResponseHeaderProto header = 
 RpcResponseHeaderProto.parseDelimitedFrom(in);
 if (header == null) {
   throw new IOException("Response is null.");
 }
+int headerLen = header.getSerializedSize();
+headerLen += CodedOutputStream.computeRawVarint32Size(headerLen);
 
 int callId = header.getCallId();
 if (LOG.isDebugEnabled())
@@ -958,11 +962,28 @@ public class Client {
   value.readFields(in); // read value
   call.setRpcResponse(value);
   calls.remove(callId);
+  
+  // verify that length was correct
+

svn commit: r1483144 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/ipc/ src/main/proto/ src/t

2013-05-15 Thread sradia
Author: sradia
Date: Thu May 16 01:32:14 2013
New Revision: 1483144

URL: http://svn.apache.org/r1483144
Log:
Merged HADOOP-9151

Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
--
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1454593

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1483144&r1=1483143&r2=1483144&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu May 16 01:32:14 2013
@@ -7,6 +7,9 @@ Release 2.0.5-beta - UNRELEASED
 HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
 avoid an extra copy (Sanjay Radia)
 
+HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
+it separately (sanjay Radia)
+
   NEW FEATURES
 
 HADOOP-9194. RPC support for QoS. (Junping Du via llu)

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1454593

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1483144&r1=1483143&r2=1483144&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 Thu May 16 01:32:14 2013
@@ -320,4 +320,11 @@


  
+ 
+ 
+ 
+   
+   
+   
+ 
  

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1454593

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1454593

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1483144&r1=1483143&r2=1483144&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Thu May 16 01:32:14 2013
@@ -59,7 +59,6 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
@@ -942,31 +941,38 @@ public class Client {
   touch();
   
   try {
-   

svn commit: r1483143 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ CHANGES.txt src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

2013-05-15 Thread sradia
Author: sradia
Date: Thu May 16 01:25:30 2013
New Revision: 1483143

URL: http://svn.apache.org/r1483143
Log:
Merged in HADOOP-9163

Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
--
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1452581

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1483143&r1=1483142&r2=1483143&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu May 16 01:25:30 2013
@@ -4,6 +4,9 @@ Release 2.0.5-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES
 
+HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+avoid an extra copy (Sanjay Radia)
+
   NEW FEATURES
 
 HADOOP-9194. RPC support for QoS. (Junping Du via llu)

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1452581

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1452581

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1452581

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1483143&r1=1483142&r2=1483143&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 Thu May 16 01:25:30 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.ipc;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.net.InetSocketAddress;
@@ -39,7 +40,7 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestProto;
+import 
org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -128,25 +129,12 @@ public class ProtobufRpcEngine implement
   .getProtocolVersion(protocol);
 }
 
-private RequestProto constructRpcRequest(Method method,
-Object[] params) throws ServiceException {
-  RequestProto rpcRequest;
-  RequestProto.Builder builder = RequestProto
+private RequestHeaderProto constructRpcRequestHeader(Method method) {
+  RequestHeaderProto.Builder builder = RequestHeaderProto
   .newBuilder();
   builder.setMethodName(method.getName());
+ 
 
-  if (params.length != 2) { // RpcController + Message
-throw new ServiceException("Too many parameters for request. Method: ["

svn commit: r1483142 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ CHANGES.txt src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

2013-05-15 Thread sradia
Author: sradia
Date: Thu May 16 01:17:43 2013
New Revision: 1483142

URL: http://svn.apache.org/r1483142
Log:
Merged in HADOOP-9218 Document the Rpc-wrappers used internally (sanjay Radia)

Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
--
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1446428

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1483142&r1=1483141&r2=1483142&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu May 16 01:17:43 2013
@@ -65,6 +65,8 @@ Release 2.0.5-beta - UNRELEASED
 
 HADOOP-9140 Cleanup rpc PB protos (sanjay Radia)
 
+HADOOP-9218 Document the Rpc-wrappers used internally (sanjay Radia)
+
   OPTIMIZATIONS
 
 HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1446428

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1446428

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1446428

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1483142&r1=1483141&r2=1483142&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 Thu May 16 01:17:43 2013
@@ -62,7 +62,7 @@ public class ProtobufRpcEngine implement
   
   static { // Register the rpcRequest deserializer for WritableRpcEngine 
 org.apache.hadoop.ipc.Server.registerProtocolEngine(
-RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWritable.class,
+RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWrapper.class,
 new Server.ProtoBufRpcInvoker());
   }
 
@@ -122,7 +122,7 @@ public class ProtobufRpcEngine implement
 public Invoker(Class protocol, Client.ConnectionId connId,
 Configuration conf, SocketFactory factory) {
   this.remoteId = connId;
-  this.client = CLIENTS.getClient(conf, factory, 
RpcResponseWritable.class);
+  this.client = CLIENTS.getClient(conf, factory, RpcResponseWrapper.class);
   this.protocolName = RPC.getProtocolName(protocol);
   this.clientProtocolVersion = RPC
   .getProtocolVersion(protocol);
@@ -191,7 +191,7 @@ public class ProtobufRpcEngine implement
   }
 
   RequestProto rpcRequest = constructRpcRequest(method, args);
-  RpcResponseWritable val = null;
+  RpcResponseWrapper val = null;
   
   if (LOG.isTraceEnabled()) {
 LOG.trace(Thread.currentThread().getId() + ": Call -> " +
@@ -199,8 +199,8 @@ public class ProtobufRpcEngine implement
 " {" + TextFormat.shortDebugString((Message) args[1]) + "}");
   }
   try {
-val = (RpcResponseWritable) 
client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
-new RpcRequestWritable(r

svn commit: r1479143 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/main/proto/ src/test/java/org/apache/hadoop/ipc/

2013-05-04 Thread sradia
Author: sradia
Date: Sat May  4 17:51:22 2013
New Revision: 1479143

URL: http://svn.apache.org/r1479143
Log:
HADOOP-9425 Add error codes to rpc-response (sanjay Radia)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchMethodException.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchProtocolException.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1479143&r1=1479142&r2=1479143&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Sat May 
 4 17:51:22 2013
@@ -19,6 +19,8 @@ Trunk (Unreleased)
 
 HADOOP-9194. RPC Support for QoS. (Junping Du via llu)
 
+HADOOP-9425 Add error codes to rpc-response (sanjay Radia)
+
   NEW FEATURES
 
 HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1479143&r1=1479142&r2=1479143&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Sat May  4 17:51:22 2013
@@ -65,6 +65,7 @@ import org.apache.hadoop.io.retry.RetryP
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
 import 
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
+import 
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
 import 
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
 import org.apache.hadoop.net.ConnectTimeoutException;
 import org.apache.hadoop.net.NetUtils;
@@ -987,8 +988,15 @@ public class Client {
   "ServerDidNotSetExceptionClassName";
   final String errorMsg = header.hasErrorMsg() ? 
 header.getErrorMsg() : "ServerDidNotSetErrorMsg" ;
+  final RpcErrorCodeProto erCode = 
+(header.hasErrorDetail() ? header.getErrorDetail() : null);
+  if (erCode == null) {
+ LOG.warn("Detailed error code not set by server on rpc error");
+  }
   RemoteException re = 
-  new RemoteException(exceptionClassName, errorMsg);
+  ( (erCode == null) ? 
+  new RemoteException(exceptionClassName, errorMsg) :
+  new RemoteException(exceptionClassName, errorMsg, erCode));
   if (status == RpcStatusProto.ERROR) {
 call.setException(re);
 calls.remove(callId);

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcE

svn commit: r1477994 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java

2013-05-01 Thread sradia
Author: sradia
Date: Wed May  1 13:31:08 2013
New Revision: 1477994

URL: http://svn.apache.org/r1477994
Log:
HDFS-4659 Support setting execution bit for regular files (Brandon Li via 
sanjay)

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java?rev=1477994&r1=1477993&r2=1477994&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
 Wed May  1 13:31:08 2013
@@ -202,8 +202,6 @@ public abstract class FileContextPermiss
   
   
   /*
-   * Some filesystem like HDFS ignore the "x" bit if the permission.
-   * Others like localFs does not.
* Override the method below if the file system being tested masks our
* certain bits for file masks.
*/




svn commit: r1460922 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java

2013-03-25 Thread sradia
Author: sradia
Date: Mon Mar 25 22:30:30 2013
New Revision: 1460922

URL: http://svn.apache.org/r1460922
Log:
HADOOP-9433 TestLocalFileSystem#testHasFileDescriptor leaks file handle (Chris 
Nauroth via sanjay)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1460922&r1=1460921&r2=1460922&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Mar 
25 22:30:30 2013
@@ -356,11 +356,14 @@ Trunk (Unreleased)
 HADOOP-9431 TestSecurityUtil#testLocalHostNameForNullOrWild on systems 
where hostname
 contains capital letters  (Chris Nauroth via sanjay)
 
-   HADOOP-9261 S3n filesystem can move a directory under itself -and so 
lose data
-   (fixed in HADOOP-9258) (stevel)
+HADOOP-9261 S3n filesystem can move a directory under itself -and so lose 
data
+(fixed in HADOOP-9258) (stevel)
 
-   HADOOP-9265 S3 blockstore filesystem breaks part of the Filesystem 
contract
-   (fixed in HADOOP-9258) (stevel)
+HADOOP-9265 S3 blockstore filesystem breaks part of the Filesystem contract
+(fixed in HADOOP-9258) (stevel)
+
+HADOOP-9433 TestLocalFileSystem#testHasFileDescriptor leaks file handle
+(Chris Nauroth via sanjay)
 
   OPTIMIZATIONS
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1460922&r1=1460921&r2=1460922&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
 Mon Mar 25 22:30:30 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.fs;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem.Statistics;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Shell;
 
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
@@ -266,9 +267,14 @@ public class TestLocalFileSystem {
 LocalFileSystem fs = FileSystem.getLocal(conf);
 Path path = new Path(TEST_ROOT_DIR, "test-file");
 writeFile(fs, path, 1);
-BufferedFSInputStream bis = new BufferedFSInputStream(
-new RawLocalFileSystem().new LocalFSFileInputStream(path), 1024);
-assertNotNull(bis.getFileDescriptor());
+BufferedFSInputStream bis = null;
+try {
+  bis = new BufferedFSInputStream(new RawLocalFileSystem()
+.new LocalFSFileInputStream(path), 1024);
+  assertNotNull(bis.getFileDescriptor());
+} finally {
+  IOUtils.cleanup(null, bis);
+}
   }
 
   @Test




svn commit: r1460181 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/test/java/org/apache/hadoop/security/TestSecurityUtil.java

2013-03-23 Thread sradia
Author: sradia
Date: Sat Mar 23 16:07:48 2013
New Revision: 1460181

URL: http://svn.apache.org/r1460181
Log:
HADOOP-9431 TestSecurityUtil#testLocalHostNameForNullOrWild on systems where 
hostname contains capital letters  (Chris Nauroth via sanjay)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1460181&r1=1460180&r2=1460181&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Sat Mar 
23 16:07:48 2013
@@ -355,6 +355,9 @@ Trunk (Unreleased)
 HADOOP-9405. TestGridmixSummary#testExecutionSummarizer is broken. (Andrew
 Wang via atm)
 
+HADOOP-9431 TestSecurityUtil#testLocalHostNameForNullOrWild on systems 
where hostname
+contains capital letters  (Chris Nauroth via sanjay)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java?rev=1460181&r1=1460180&r2=1460181&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
 Sat Mar 23 16:07:48 2013
@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.util.Locale;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
 
@@ -112,7 +113,7 @@ public class TestSecurityUtil {
 
   @Test
   public void testLocalHostNameForNullOrWild() throws Exception {
-String local = SecurityUtil.getLocalHostName();
+String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.US);
 assertEquals("hdfs/" + local + "@REALM",
  SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", 
(String)null));
 assertEquals("hdfs/" + local + "@REALM",




svn commit: r1459392 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.ja

2013-03-21 Thread sradia
Author: sradia
Date: Thu Mar 21 16:41:28 2013
New Revision: 1459392

URL: http://svn.apache.org/r1459392
Log:
HADOOP-9380 Add totalLength to rpc response  (sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1459392&r1=1459391&r2=1459392&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Mar 
21 16:41:28 2013
@@ -15,6 +15,8 @@ Trunk (Unreleased)
 HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
 it separately (sanjay Radia)
 
+HADOOP-9380 Add totalLength to rpc response  (sanjay Radia)
+
   NEW FEATURES
 
 HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1459392&r1=1459391&r2=1459392&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Thu Mar 21 16:41:28 2013
@@ -83,6 +83,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.Time;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.google.protobuf.CodedOutputStream;
 
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
@@ -242,7 +243,7 @@ public class Client {
   callComplete();
 }
 
-public synchronized Writable getRpcResult() {
+public synchronized Writable getRpcResponse() {
   return rpcResponse;
 }
   }
@@ -944,11 +945,14 @@ public class Client {
   touch();
   
   try {
+int totalLen = in.readInt();
 RpcResponseHeaderProto header = 
 RpcResponseHeaderProto.parseDelimitedFrom(in);
 if (header == null) {
   throw new IOException("Response is null.");
 }
+int headerLen = header.getSerializedSize();
+headerLen += CodedOutputStream.computeRawVarint32Size(headerLen);
 
 int callId = header.getCallId();
 if (LOG.isDebugEnabled())
@@ -961,11 +965,28 @@ public class Client {
   value.readFields(in); // read value
   call.setRpcResponse(value);
   calls.remove(callId);
+  
+  // verify that length was correct
+  // only for ProtobufEngine where len can be verified easily
+  if (call.getRpcResponse() instanceof ProtobufRpcEngine.RpcWrapper) {
+ProtobufRpcEngine.RpcWrapper resWrapper = 
+(ProtobufRpcEngine.RpcWrapper) call.getRpcResponse();
+if (totalLen != headerLen + resWrapper.getLength()) { 
+  throw new RpcClientException(
+  "RPC response length mismatch on rpc success");
+}
+  }
 } else { // Rpc Request failed
-final String exceptionClassName = header.hasExceptionClassName() ?
+  // Verify that length was correct
+  if (totalLen != headerLen) {
+throw new RpcClientException(
+"RPC response length mismatch on rpc error");
+  }
+  
+  final String exceptionClassName = header.hasExceptionClassName() ?
 header.getExceptionClassName() : 
   "ServerDidNotSetExceptionClassName";
-final String errorMsg = header.hasErrorMsg() ? 
+  final String errorMsg = header.hasErrorMsg() ? 
 header.getErrorMsg() : "ServerDidNotSetErrorMsg" ;
   RemoteException re = 
   new RemoteException(exceptionClassName, errorMsg);
@@ -1251,7 +1272,7 @@ public class Client {
   call.error);
 }
   } else {
-return call.getRpcResult();
+return call.getRpcResponse();
   }
 }
   }

Modified: 
hadoop/common/tru

svn commit: r1454593 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/java/org/apache/hadoop/ipc/ src/main/proto/

2013-03-08 Thread sradia
Author: sradia
Date: Fri Mar  8 21:47:57 2013
New Revision: 1454593

URL: http://svn.apache.org/r1454593
Log:
HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending it 
separately (sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1454593&r1=1454592&r2=1454593&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Mar 
 8 21:47:57 2013
@@ -8,6 +8,12 @@ Trunk (Unreleased)
 FSDataOutputStream.sync() and Syncable.sync().  (szetszwo)
 
 HADOOP-8886. Remove KFS support. (eli)
+
+HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+avoid an extra copy (Sanjay Radia)
+
+HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
+it separately (sanjay Radia)
 
   NEW FEATURES
 
@@ -157,8 +163,6 @@ Trunk (Unreleased)
 HADOOP-9112. test-patch should -1 for @Tests without a timeout 
 (Surenkumar Nihalani via bobby)
 
-HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
-avoid an extra copy (Sanjay Radia)
 
   BUG FIXES
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1454593&r1=1454592&r2=1454593&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 Fri Mar  8 21:47:57 2013
@@ -308,4 +308,11 @@


  
+ 
+ 
+ 
+   
+   
+   
+ 
  

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1454593&r1=1454592&r2=1454593&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Fri Mar  8 21:47:57 2013
@@ -59,7 +59,6 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
@@ -945,31 +944,38 @@ public class Client {
   touch();
   
   try {
-RpcResponseHeaderProto response = 
+RpcResponseHeaderProto header = 
 RpcResponseHeaderProto.parseDelimitedFrom(in);
-if (response == null) {
+if (header == null) {
   throw new IOException("Response is null.");
 }
 
-int callId = response.getCallId();
+int callId = header.getCallId();
 if (LOG.isDebugEnabled())
   LOG.debug(getName() + " got value #" + callId);
 
 Call call = calls.get(callId);
-RpcStatusProto status = response.getStatus();
+RpcStatusProto status = header.getStatus();
 if (status == RpcStatusProto.SUCCESS) {
   Writable value = ReflectionUtils.newInstance(valueClass, conf);
   value.readFields(in); // read value
   call.setRpcResponse(value);
   calls.remove(callId);
-} else if (status == RpcStatusProto.ERROR) {
-  call.setException(new RemoteException(WritableUtils.readString(in),
-WritableUtils.readString(in)));
-  calls.remove(callId);
-} else if (status == RpcStatusProto.FATAL) {
-  // Close the connection
-  ma

svn commit: r1452581 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java src/main/proto/ProtobufRpcEngine.proto

2013-03-04 Thread sradia
Author: sradia
Date: Mon Mar  4 22:55:22 2013
New Revision: 1452581

URL: http://svn.apache.org/r1452581
Log:
HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to avoid 
an extra copy (Sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1452581&r1=1452580&r2=1452581&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Mar 
 4 22:55:22 2013
@@ -153,6 +153,9 @@ Trunk (Unreleased)
 HADOOP-9112. test-patch should -1 for @Tests without a timeout 
 (Surenkumar Nihalani via bobby)
 
+HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+avoid an extra copy (Sanjay Radia)
+
   BUG FIXES
 
 HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1452581&r1=1452580&r2=1452581&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 Mon Mar  4 22:55:22 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.ipc;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.net.InetSocketAddress;
@@ -39,7 +40,7 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestProto;
+import 
org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -128,25 +129,12 @@ public class ProtobufRpcEngine implement
   .getProtocolVersion(protocol);
 }
 
-private RequestProto constructRpcRequest(Method method,
-Object[] params) throws ServiceException {
-  RequestProto rpcRequest;
-  RequestProto.Builder builder = RequestProto
+private RequestHeaderProto constructRpcRequestHeader(Method method) {
+  RequestHeaderProto.Builder builder = RequestHeaderProto
   .newBuilder();
   builder.setMethodName(method.getName());
+ 
 
-  if (params.length != 2) { // RpcController + Message
-throw new ServiceException("Too many parameters for request. Method: ["
-+ method.getName() + "]" + ", Expected: 2, Actual: "
-+ params.length);
-  }
-  if (params[1] == null) {
-throw new ServiceException("null param while calling Method: ["
-+ method.getName() + "]");
-  }
-
-  Message param = (Message) params[1];
-  builder.setRequest(param.toByteString());
   // For protobuf, {@code protocol} used when creating client side proxy is
   // the interface extending BlockingInterface, which has the annotations 
   // such as ProtocolName etc.
@@ -160,8 +148,7 @@ public class ProtobufRpcEngine implement
   // For PB this may limit the use of mixins on client side.
   builder.setDeclaringClassProtocolName(protocolName);
   builder.setClientProtocolVersion(clientProtocolVersion);
-  rpcRequest = builder.build();
-  return rpcRequest;
+  return builder.build();
 }
 
 /**
@@ -189,8 +176,18 @@ public class ProtobufRpcEngine implement
   if (LOG.isDebugEnabled()) {
 startTime = Time.now();
   }
+  
+  if (args.length != 2) { // RpcController + Message
+throw new ServiceException("Too many parameters for request. Method: ["
++ method.getName() + "]" + ", Expected: 2, Actual: "
++ args.length);
+  }
+  if (args[1] == null) {
+throw new ServiceException("null param while calling Method: ["

svn commit: r1446428 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

2013-02-14 Thread sradia
Author: sradia
Date: Fri Feb 15 02:26:46 2013
New Revision: 1446428

URL: http://svn.apache.org/r1446428
Log:
HADOOP-9218 Document the Rpc-wrappers used internally (sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1446428&r1=1446427&r2=1446428&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Feb 
15 02:26:46 2013
@@ -148,6 +148,8 @@ Trunk (Unreleased)
 
 HADOOP-9277. Improve javadoc for FileContext. (Andrew Wang via suresh)
 
+HADOOP-9218 Document the Rpc-wrappers used internally (sanjay Radia)
+
   BUG FIXES
 
 HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1446428&r1=1446427&r2=1446428&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 Fri Feb 15 02:26:46 2013
@@ -62,7 +62,7 @@ public class ProtobufRpcEngine implement
   
   static { // Register the rpcRequest deserializer for WritableRpcEngine 
 org.apache.hadoop.ipc.Server.registerProtocolEngine(
-RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWritable.class,
+RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWrapper.class,
 new Server.ProtoBufRpcInvoker());
   }
 
@@ -122,7 +122,7 @@ public class ProtobufRpcEngine implement
 public Invoker(Class protocol, Client.ConnectionId connId,
 Configuration conf, SocketFactory factory) {
   this.remoteId = connId;
-  this.client = CLIENTS.getClient(conf, factory, 
RpcResponseWritable.class);
+  this.client = CLIENTS.getClient(conf, factory, RpcResponseWrapper.class);
   this.protocolName = RPC.getProtocolName(protocol);
   this.clientProtocolVersion = RPC
   .getProtocolVersion(protocol);
@@ -191,7 +191,7 @@ public class ProtobufRpcEngine implement
   }
 
   RequestProto rpcRequest = constructRpcRequest(method, args);
-  RpcResponseWritable val = null;
+  RpcResponseWrapper val = null;
   
   if (LOG.isTraceEnabled()) {
 LOG.trace(Thread.currentThread().getId() + ": Call -> " +
@@ -199,8 +199,8 @@ public class ProtobufRpcEngine implement
 " {" + TextFormat.shortDebugString((Message) args[1]) + "}");
   }
   try {
-val = (RpcResponseWritable) 
client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
-new RpcRequestWritable(rpcRequest), remoteId);
+val = (RpcResponseWrapper) client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
+new RpcRequestWrapper(rpcRequest), remoteId);
 
   } catch (Throwable e) {
 if (LOG.isTraceEnabled()) {
@@ -268,16 +268,20 @@ public class ProtobufRpcEngine implement
   }
 
   /**
-   * Writable Wrapper for Protocol Buffer Requests
+   * Wrapper for Protocol Buffer Requests
+   * 
+   * Note while this wrapper is writable, the request on the wire is in
+   * Protobuf. Several methods on {@link org.apache.hadoop.ipc.Server and RPC} 
+   * use type Writable as a wrapper to work across multiple RpcEngine kinds.
*/
-  private static class RpcRequestWritable implements Writable {
+  private static class RpcRequestWrapper implements Writable {
 RequestProto message;
 
 @SuppressWarnings("unused")
-public RpcRequestWritable() {
+public RpcRequestWrapper() {
 }
 
-RpcRequestWritable(RequestProto message) {
+RpcRequestWrapper(RequestProto message) {
   this.message = message;
 }
 
@@ -303,16 +307,20 @@ public class ProtobufRpcEngine implement
   }
 
   /**
-   * Writable Wrapper for Protocol Buffer Responses
+   *  Wrapper for Protocol Buffer Responses
+   * 
+   * Note while this wrapper is writable, the request on the wire is in
+   * Protobuf. Several methods on {@link org.apache.hadoop.ipc.Server and RPC} 
+   * use type Writable as a wrapper to work across multiple RpcEngine kinds.
*/
-  private static class RpcResponseWritable implements Writable {
+  private static class RpcResponseWrapper implemen

svn commit: r1432185 - in /hadoop/common/branches/branch-1: ./ src/hdfs/org/apache/hadoop/hdfs/ src/hdfs/org/apache/hadoop/hdfs/protocol/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/hdfs/org/

2013-01-11 Thread sradia
Author: sradia
Date: Fri Jan 11 17:39:25 2013
New Revision: 1432185

URL: http://svn.apache.org/viewvc?rev=1432185&view=rev
Log:
HDFS-4256 Backport concatenation of files into a single file to branch-1 
(sanjay Radia)

Added:

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/tools/HDFSConcat.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java
Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/protocol/FSConstants.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectoryWithQuota.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeFile.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
hadoop/common/branches/branch-1/src/test/commit-tests

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/DFSTestUtil.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1432185&r1=1432184&r2=1432185&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Fri Jan 11 17:39:25 2013
@@ -61,6 +61,10 @@ Release 1.2.0 - unreleased
 NetworkTopology with NodeGroup and use generic code for choosing datanode
 in Balancer.  (Junping Du via szetszwo)
 
+
+HDFS-4256 Backport concatenation of files into a single file to branch-1
+(sanjay Radia)
+
   IMPROVEMENTS
 
 HDFS-3515. Port HDFS-1457 to branch-1. (eli)

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=1432185&r1=1432184&r2=1432185&view=diff
==
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java 
(original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java 
Fri Jan 11 17:39:25 2013
@@ -944,6 +944,19 @@ public class DFSClient implements FSCons
  DSQuotaExceededException.class);
 }
   }
+  
+  /**
+   * Move blocks from src to trg and delete src
+   * See {@link ClientProtocol#concat(String, String [])}. 
+   */
+  public void concat(String trg, String [] srcs) throws IOException {
+checkOpen();
+try {
+  namenode.concat(trg, srcs);
+} catch(RemoteException re) {
+  throw re.unwrapRemoteException(AccessControlException.class);
+}
+  }
 
   /**
* Rename file or directory.

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=1432185&r1=1432184&r2=1432185&view=diff
==
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
 Fri Jan 11 17:39:25 2013
@@ -220,6 +220,24 @@ public class DistributedFileSystem exten
   }
 
   /**
+   * THIS IS DFS only operations, it is not part of FileSystem
+   * move blocks from srcs to trg
+   * and delete srcs afterwards
+   * all blocks should be the same size
+   * @param trg existing file to append to
+   * @param psrcs list of files (same block size, same replication)
+   * @throws IOException
+   */
+  public void concat(Path trg, Path [] psrcs) throws IOException {
+String [] srcs = new String [psrcs.length];
+for(int i=0; i<psrcs.length; i++) {
+  srcs[i] = getPathName(psrcs[i]);
+}
+dfs.concat(getPathName(trg), srcs);
+  }

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java?rev=1432185&r1=1432184&r2=1432185&view=diff
==
--- 
hadoop/common/branches/branch-1/src/hdfs

svn commit: r1423189 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/util/ src/main/proto/

2012-12-17 Thread sradia
Author: sradia
Date: Mon Dec 17 22:16:57 2012
New Revision: 1423189

URL: http://svn.apache.org/viewvc?rev=1423189&view=rev
Log:
HADOOP-9140 Cleanup rpc PB protos (sanjay Radia)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
  - copied, changed from r1423171, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
  - copied, changed from r1423171, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
Removed:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1423189&r1=1423188&r2=1423189&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Dec 
17 22:16:57 2012
@@ -144,6 +144,8 @@ Trunk (Unreleased)
 HADOOP-9093. Move all the Exception in PathExceptions to o.a.h.fs package.
 (suresh)
 
+HADOOP-9140 Cleanup rpc PB protos (sanjay Radia)
+
   BUG FIXES
 
 HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1423189&r1=1423188&r2=1423189&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 Mon Dec 17 22:16:57 2012
@@ -260,7 +260,7 @@
 
 
   
-  
+  
 
 
   
@@ -272,7 +272,7 @@
 
 
   
-  
+  
 
 
   

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml?rev=1423189&r1=1423188&r2=1423189&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml Mon Dec 17 
22:16:57 2012
@@ -378,9 +378,9 @@
 src/main/proto/HAServiceProtocol.proto
 src/main/proto/IpcConnectionContext.proto
 src/main/proto/ProtocolInfo.proto
-src/main/proto/RpcPayloadHeader.proto
+src/main/proto/RpcHeader.proto
 src/main/proto/ZKFCProtocol.proto
-src/main/proto/hadoop_rpc.proto
+src/main/proto/ProtobufRpcEngine.proto
   
 
   

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1423189&r1=1423188&r2=1423189&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Dec 17 22:16:57 2012
@@ -63,11 +63,10 @@ import org.apache.hadoop.io.WritableUtil
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
-import 
org.apache.hadoop.ipc.protobuf.IpcConnec

svn commit: r1420965 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/local/ src/main/java/org/apache/hadoop/fs/

2012-12-12 Thread sradia
Author: sradia
Date: Wed Dec 12 20:54:30 2012
New Revision: 1420965

URL: http://svn.apache.org/viewvc?rev=1420965&view=rev
Log:
HADOOP-8957 AbstractFileSystem#IsValidName should be overridden for embedded 
file systems like ViewFs (Chris Nauroth via Sanjay)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1420965&r1=1420964&r2=1420965&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Dec 
12 20:54:30 2012
@@ -298,6 +298,9 @@ Trunk (Unreleased)
 HADOOP-9131. Turn off TestLocalFileSystem#testListStatusWithColons on
 Windows. (Chris Nauroth via suresh)
 
+HADOOP-8957 AbstractFileSystem#IsValidName should be overridden for
+embedded file systems like ViewFs (Chris Nauroth via Sanjay Radia)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=1420965&r1=1420964&r2=1420965&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
 Wed Dec 12 20:54:30 2012
@@ -85,14 +85,20 @@ public abstract class AbstractFileSystem
   }
   
   /**
-   * Prohibits names which contain a ".", "..", ":" or "/" 
+   * Returns true if the specified string is considered valid in the path part
+   * of a URI by this file system.  The default implementation enforces the 
rules
+   * of HDFS, but subclasses may override this method to implement specific
+   * validation rules for specific file systems.
+   * 
+   * @param src String source filename to check, path part of the URI
+   * @return boolean true if the specified string is considered valid
*/
-  private static boolean isValidName(String src) {
-// Check for ".." "." ":" "/"
+  public boolean isValidName(String src) {
+// Prohibit ".." "." and anything containing ":"
 StringTokenizer tokens = new StringTokenizer(src, Path.SEPARATOR);
 while(tokens.hasMoreTokens()) {
   String element = tokens.nextToken();
-  if (element.equals("target/generated-sources") ||
+  if (element.equals("..") ||
   element.equals(".")  ||
   (element.indexOf(":") >= 0)) {
 return false;

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java?rev=1420965&r1=1420964&r2=1420965&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
 Wed Dec 12 20:54:30 2012
@@ -278,4 +278,9 @@ public abstract class FilterFs extends A
   public List<Token<?>> getDelegationTokens(String renewer) throws IOException 
{
 return myFs.getDelegationTokens(renewer);
   }
+
+  @Override
+  public boolean isValidName(String src) {
+return myFs.isValidName(src);
+  }
 }

Modified: 
hadoop/common/trunk/h

svn commit: r1418424 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs: FileContextTestHelper.java FileSystemTestHelper.java

2012-12-07 Thread sradia
Author: sradia
Date: Fri Dec  7 18:32:27 2012
New Revision: 1418424

URL: http://svn.apache.org/viewvc?rev=1418424&view=rev
Log:
HDFS-4260 Fix HDFS tests to set test dir to a valid HDFS path as opposed to the 
local build path (Chris Nauroth via Sanjay)

Modified:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java?rev=1418424&r1=1418423&r2=1418424&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
 Fri Dec  7 18:32:27 2012
@@ -32,7 +32,7 @@ import org.junit.Assert;
  */
 public final class FileContextTestHelper {
   // The test root is relative to the /build/test/data by default
-  public static final String TEST_ROOT_DIR = 
+  public static String TEST_ROOT_DIR = 
 System.getProperty("test.build.data", "build/test/data") + "/test";
   private static final int DEFAULT_BLOCK_SIZE = 1024;
   private static final int DEFAULT_NUM_BLOCKS = 2;

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1418424&r1=1418423&r2=1418424&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
 Fri Dec  7 18:32:27 2012
@@ -34,7 +34,7 @@ import static org.mockito.Mockito.mock;
  */
 public final class FileSystemTestHelper {
   // The test root is relative to the /build/test/data by default
-  public static final String TEST_ROOT_DIR = 
+  public static String TEST_ROOT_DIR = 
 System.getProperty("test.build.data", "target/test/data") + "/test";
   private static final int DEFAULT_BLOCK_SIZE = 1024;
   private static final int DEFAULT_NUM_BLOCKS = 2;




svn commit: r1406939 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/viewfs/ src/test/java/org/apache/hadoop/fs

2012-11-07 Thread sradia
Author: sradia
Date: Thu Nov  8 06:16:02 2012
New Revision: 1406939

URL: http://svn.apache.org/viewvc?rev=1406939&view=rev
Log:
HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1406939&r1=1406938&r2=1406939&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Nov 
 8 06:16:02 2012
@@ -272,10 +272,13 @@ Trunk (Unreleased)
 HADOOP-8918. test-patch.sh is parsing modified files wrong.
 (Raja Aluri via suresh)
 
+HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay 
Radia)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
+HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay 
Radia)
 Release 2.0.3-alpha - Unreleased 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java?rev=1406939&r1=1406938&r2=1406939&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
 Thu Nov  8 06:16:02 2012
@@ -125,6 +125,11 @@ public abstract class DelegateToFileSyst
   public FsServerDefaults getServerDefaults() throws IOException {
 return fsImpl.getServerDefaults();
   }
+  
+  @Override
+  public Path getHomeDirectory() {
+return fsImpl.getHomeDirectory();
+  }
 
   @Override
   public int getUriDefaultPort() {

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1406939&r1=1406938&r2=1406939&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
 Thu Nov  8 06:16:02 2012
@@ -153,12 +153,6 @@ class ChRootedFileSystem extends FilterF
 return makeQualified(
 new Path(chRootPathPartString + f.toUri().toString()));
   }
-  
-  @Override
-  public Path getHomeDirectory() {
-return  new Path("/user/"+System.getProperty("user.name")).makeQualified(
-  getUri(), null);
-  }
 
   @Override
   public Path getWorkingDirectory() {

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/Vie

svn commit: r1362313 - in /hadoop/common/branches/branch-1: ./ src/hdfs/org/apache/hadoop/hdfs/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/test/org/apache/hadoop/hdfs/

2012-07-16 Thread sradia
Author: sradia
Date: Mon Jul 16 23:42:26 2012
New Revision: 1362313

URL: http://svn.apache.org/viewvc?rev=1362313&view=rev
Log:
HDFS-1108 Log newly allocated blocks (hdfs-1108-hadoop-1-v5.patch) (sanjay) 

Added:

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/TestPersistBlocks.java

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/hadoop-1.0-multiblock-file.tgz
   (with props)
Modified:
hadoop/common/branches/branch-1/CHANGES.txt
hadoop/common/branches/branch-1/build.xml

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java

hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1362313&r1=1362312&r2=1362313&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Mon Jul 16 23:42:26 2012
@@ -215,6 +215,8 @@ Release 1.1.0 - unreleased
 
 HADOOP-8365. Provide ability to disable working sync. (eli)
 
+HDFS-1108 Log newly allocated blocks (hdfs-1108-hadoop-1-v5.patch) 
(sanjay) 
+
   BUG FIXES
 
 MAPREDUCE-4087. [Gridmix] GenerateDistCacheData job of Gridmix can

Modified: hadoop/common/branches/branch-1/build.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/build.xml?rev=1362313&r1=1362312&r2=1362313&view=diff
==
--- hadoop/common/branches/branch-1/build.xml (original)
+++ hadoop/common/branches/branch-1/build.xml Mon Jul 16 23:42:26 2012
@@ -921,6 +921,7 @@
 
 
 
+
 
 
 

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1362313&r1=1362312&r2=1362313&view=diff
==
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
 Mon Jul 16 23:42:26 2012
@@ -104,6 +104,8 @@ public class DFSConfigKeys extends Commo
   public static final boolean DFS_WEBHDFS_ENABLED_DEFAULT = false;
   public static final String  DFS_PERMISSIONS_ENABLED_KEY = 
"dfs.permissions.enabled";
   public static final boolean DFS_PERMISSIONS_ENABLED_DEFAULT = true;
+  public static final String  DFS_PERSIST_BLOCKS_KEY = "dfs.persist.blocks";
+  public static final boolean DFS_PERSIST_BLOCKS_DEFAULT = false;
   public static final String  DFS_PERMISSIONS_SUPERUSERGROUP_KEY = 
"dfs.permissions.superusergroup";
   public static final String  DFS_ADMIN = "dfs.cluster.administrators";
   public static final String  DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT = 
"supergroup";

Modified: 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1362313&r1=1362312&r2=1362313&view=diff
==
--- 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 Mon Jul 16 23:42:26 2012
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_PERSIST_BLOCKS_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_PERSIST_BLOCKS_KEY;
+
 import java.io.BufferedWriter;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
@@ -180,6 +183,7 @@ public class FSNamesystem implements FSC
   public static final float DEFAULT_MAP_LOAD_FACTOR = 0.75f;
 
   private boolean isPermissionEnabled;
+  private boolean persistBlocks;
   private UserGroupInformation fsOwner;
   private String supergroup;
   private PermissionStatus defaultPermission;
@@ -487,6 +491,10 @@ public class FSNamesystem implements FSC
 this.isPermissionEnabled = conf.getBoolean("dfs.permissions", true);
 LOG.info("supergroup=" + supergroup);
 LOG.info("isPermissionEnabled=" + isPermissionEnabled);
+
+this.persistBlocks = conf.getBoolean(DFS_PERSIST_BLOCKS_KEY,
+  DFS_PERSIST_BLOCKS_DEFAULT);
+
 short filePermission = (short)conf.getInt("dfs.upgrade.permission", 0777);
 this.defaultPermissio

svn commit: r1361025 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/winutils/chmod.c src/winutils/chown.c

2012-07-12 Thread sradia
Author: sradia
Date: Fri Jul 13 00:59:19 2012
New Revision: 1361025

URL: http://svn.apache.org/viewvc?rev=1361025&view=rev
Log:
HADOOP-8544 Move an assertion location in 'winutils chmod' (Chuan Liu via 
sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt
hadoop/common/branches/branch-1-win/src/winutils/chmod.c
hadoop/common/branches/branch-1-win/src/winutils/chown.c

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1361025&r1=1361024&r2=1361025&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Fri Jul 13 00:59:19 2012
@@ -62,6 +62,8 @@ branch-hadoop-1-win - unreleased
 HADOOP-8414 Address problems related to localhost resolving to 127.0.0.1 
on Windows
 (Ivan Mitic via Sanjay Radia)
 
+HADOOP-8544 Move an assertion location in 'winutils chmod' (Chuan Liu via 
sanjay)
+
 BUG FIXES
 
 HDFS-6527. Backport HADOOP-7389: Use of TestingGroups by tests causes

Modified: hadoop/common/branches/branch-1-win/src/winutils/chmod.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/winutils/chmod.c?rev=1361025&r1=1361024&r2=1361025&view=diff
==
--- hadoop/common/branches/branch-1-win/src/winutils/chmod.c (original)
+++ hadoop/common/branches/branch-1-win/src/winutils/chmod.c Fri Jul 13 
00:59:19 2012
@@ -593,20 +593,22 @@ static USHORT ComputeNewMode(__in USHORT
   USHORT mask = 0;
   USHORT mode = 0;
 
-  // Operations are exclusive
+  // Operations are exclusive, and cannot be invalid
   //
   assert(op == CHMOD_OP_EQUAL || op == CHMOD_OP_PLUS || op == CHMOD_OP_MINUS);
 
-  // We should have only permissions or a reference target, not both.
+  // Nothing needs to be changed if there is not permission or reference
   //
-  assert((perm != CHMOD_PERM_NA && ref == CHMOD_WHO_NONE) ||
-(perm == CHMOD_PERM_NA && ref != CHMOD_WHO_NONE));
-
   if(perm == CHMOD_PERM_NA && ref == CHMOD_WHO_NONE)
   {
 return oldMode;
   }
 
+  // We should have only permissions or a reference target, not both.
+  //
+  assert((perm != CHMOD_PERM_NA && ref == CHMOD_WHO_NONE) ||
+(perm == CHMOD_PERM_NA && ref != CHMOD_WHO_NONE));
+
   if (perm != CHMOD_PERM_NA)
   {
 if ((perm & CHMOD_PERM_R) == CHMOD_PERM_R)

Modified: hadoop/common/branches/branch-1-win/src/winutils/chown.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/winutils/chown.c?rev=1361025&r1=1361024&r2=1361025&view=diff
==
--- hadoop/common/branches/branch-1-win/src/winutils/chown.c (original)
+++ hadoop/common/branches/branch-1-win/src/winutils/chown.c Fri Jul 13 
00:59:19 2012
@@ -181,7 +181,7 @@ static BOOL CreateDaclForNewOwner(
 
   assert(pDACL != NULL && ppNewDACL != NULL);
   assert(pOldOwnerSid != NULL && pOldGroupSid != NULL);
-  assert(pNewOwnerSid != NULL || pNewOwnerSid != NULL);
+  assert(pNewOwnerSid != NULL || pNewGroupSid != NULL);
 
   if (!GetAclInformation(pDACL, (LPVOID)&aclSizeInfo,
 sizeof(ACL_SIZE_INFORMATION), AclSizeInformation))




svn commit: r1357755 - in /hadoop/common/branches/branch-1-win: ./ src/hdfs/org/apache/hadoop/hdfs/ src/test/org/apache/hadoop/cli/ src/test/org/apache/hadoop/ipc/

2012-07-05 Thread sradia
Author: sradia
Date: Thu Jul  5 17:33:35 2012
New Revision: 1357755

URL: http://svn.apache.org/viewvc?rev=1357755&view=rev
Log:
HADOOP-8414 Address problems related to localhost resolving to 127.0.0.1 on 
Windows by Ivan Mitic

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/TestCLI.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xml

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1357755&r1=1357754&r2=1357755&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Thu Jul  5 17:33:35 2012
@@ -59,6 +59,9 @@ branch-hadoop-1-win - unreleased
 MAPREDUCE-4368. Fix TaskRunner to deal with java.library.path with a
 quoted path on Windows. (John Gordon via acmurthy) 
 
+HADOOP-8414 Address problems related to localhost resolving to 127.0.0.1 
on Windows
+(Ivan Mitic via Sanjay Radia)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=1357755&r1=1357754&r2=1357755&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
 Thu Jul  5 17:33:35 2012
@@ -80,9 +80,10 @@ public class DistributedFileSystem exten
 super.initialize(uri, conf);
 setConf(conf);
 
-String host = uri.getHost();
-if (host == null) {
-  throw new IOException("Incomplete HDFS URI, no host: "+ uri);
+// Check for authority as URI.getHost() returns null if host contains
+// numbers. This URI is still valid and it should not be rejected.
+if (uri.getAuthority() == null) {
+  throw new IOException("Incomplete HDFS URI, no authority: "+ uri);
 }
 
 InetSocketAddress namenode = NameNode.getAddress(uri.getAuthority());

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/TestCLI.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/TestCLI.java?rev=1357755&r1=1357754&r2=1357755&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/TestCLI.java 
(original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/TestCLI.java 
Thu Jul  5 17:33:35 2012
@@ -89,7 +89,7 @@ public class TestCLI extends TestCase {
   /**
* Read the test config file - testConfig.xml
*/
-  private void readTestConfigFile() {
+  private void readTestConfigFile(String namenode) {
 
 if (testsFromConfigFile == null) {
   boolean success = false;
@@ -98,6 +98,13 @@ public class TestCLI extends TestCase {
 SAXParser p = (SAXParserFactory.newInstance()).newSAXParser();
 p.parse(testConfigFile, new TestConfigFileParser());
 success = true;
+// Expand the expected test results
+for (CLITestData testData: testsFromConfigFile) {
+  for (ComparatorData cd: testData.getComparatorData()) {
+cd.setExpectedOutput(cd.getExpectedOutput()
+.replaceAll("NAMENODE", namenode));
+  }
+}
   } catch (Exception e) {
 LOG.info("File: " + testConfigFile + " not found");
 success = false;
@@ -110,9 +117,6 @@ public class TestCLI extends TestCase {
* Setup
*/
   public void setUp() throws Exception {
-// Read the testConfig.xml file
-readTestConfigFile();
-
 // Start up the mini dfs cluster
 boolean success = false;
 conf = new Configuration();
@@ -138,6 +142,9 @@ public class TestCLI extends TestCase {
null, null, mrConf);
 jobtracker = mrCluster.createJobConf().get("mapred.job.tracker", "local");
 
+// Read the testConfig.xml file
+readTestConfigFile(namenode);
+
 success = true;
 
 assertTrue("Error setting up Mini DFS & MR clusters", success);

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xml
URL: 
http://svn.apache.org/viewvc

svn commit: r1357351 - in /hadoop/common/branches/branch-1-win: ./ src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ src/contrib/test/ src/test/org/apache/hadoop/fs/ src/test/org/apach

2012-07-04 Thread sradia
Author: sradia
Date: Wed Jul  4 17:56:51 2012
New Revision: 1357351

URL: http://svn.apache.org/viewvc?rev=1357351&view=rev
Log:
HADOOP-8487 Many HDFS tests use a test path intended for local file system 
tests (Ivan Mitic via Sanjay Radia)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
hadoop/common/branches/branch-1-win/src/contrib/test/mapred-site.xml

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/FileSystemTestHelper.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestBBWBlockReport.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestJobInProgress.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestJobQueueInformation.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestSubmitJob.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1357351&r1=1357350&r2=1357351&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Wed Jul  4 17:56:51 2012
@@ -48,6 +48,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8534 Some tests leave a config file open causing failure on windows 
  (Ivan Mitic via Sanjay Radia)
 
+HADOOP-8487 Many HDFS tests use a test path intended for local file system 
tests (Ivan Mitic via Sanjay Radia)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java?rev=1357351&r1=1357350&r2=1357351&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
 Wed Jul  4 17:56:51 2012
@@ -222,7 +222,7 @@ public class ClusterWithCapacitySchedule
 // Check the resourcePath directory
 File file = new File(resourcePath, name);
 if (file.exists()) {
-  return new URL("file://" + file.getAbsolutePath());
+  return new URL("file", null, file.getAbsolutePath());
 }
   } catch (MalformedURLException mue) {
 LOG.warn("exception : " + mue);

Modified: hadoop/common/branches/branch-1-win/src/contrib/test/mapred-site.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/test/mapred-site.xml?rev=1357351&r1=1357350&r2=1357351&view=diff
==
--- hadoop/common/branches/branch-1-win/src/contrib/test/mapred-site.xml 
(original)
+++ hadoop/common/branches/branch-1-win/src/contrib/test/mapred-site.xml Wed 
Jul  4 17:56:51 2012
@@ -10,4 +10,9 @@
   build/contrib/${contrib.name}/test/system
 
 
+
+  mapreduce.jobtracker.staging.root.dir
+  build/contrib/${contrib.name}/test/staging
+
+
 

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1357351&r1=1357350&r2=1357351&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/FileSystemTestHelper.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/FileSystemTestHelper.java
 Wed Jul  4 17:56:51 2012
@@ -31,7 +31,7 @@ import org.junit.Assert;
  */
 public final class FileSystemTestHelper {
   // The test root is relative to the /build/test/data by default
-  public static final String TEST_ROOT_DIR = 
+  public static String TEST_ROOT_DIR = 
 System.getProperty("test.build.data", "build/test/data") + "/test";
   private static final int DEFAULT_BLOCK_SIZE = 102

svn commit: r1356530 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/core/org/apache/hadoop/conf/Configuration.java

2012-07-02 Thread sradia
Author: sradia
Date: Tue Jul  3 00:28:19 2012
New Revision: 1356530

URL: http://svn.apache.org/viewvc?rev=1356530&view=rev
Log:
HADOOP-8534 Some tests leave a config file open causing failure on windows   
(Ivan Mitic via Sanjay Radia)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1356530&r1=1356529&r2=1356530&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Tue Jul  3 00:28:19 2012
@@ -46,6 +46,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8486 Resource leak ... SequenceFile  (Fix with 
HADOOP-8486-branch-1-win-(5).patch) (Kanna Karanam via Sanjay)
 
+HADOOP-8534 Some tests leave a config file open causing failure on windows 
  (Ivan Mitic via Sanjay Radia)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java?rev=1356530&r1=1356529&r2=1356530&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java
 Tue Jul  3 00:28:19 2012
@@ -1157,7 +1157,14 @@ public class Configuration implements It
   if (!quiet) {
 LOG.info("parsing " + url);
   }
-  doc = builder.parse(url.toString());
+  // Do not pass url directly to DocumentBuilder.parse() since it
+  // will keep the file open after SAXException
+  InputStream in = new BufferedInputStream(url.openStream());
+  try {
+doc = builder.parse(in);
+  } finally {
+in.close();
+  }
 }
   } else if (name instanceof String) {// a CLASSPATH resource
 URL url = getResource((String)name);
@@ -1165,7 +1172,12 @@ public class Configuration implements It
   if (!quiet) {
 LOG.info("parsing " + url);
   }
-  doc = builder.parse(url.toString());
+  InputStream in = new BufferedInputStream(url.openStream());
+  try {
+doc = builder.parse(in);
+  } finally {
+in.close();
+  }
 }
   } else if (name instanceof Path) {  // a file resource
 // Can't use FileSystem API or we get an infinite loop




svn commit: r1352709 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/test/org/apache/hadoop/io/TestSequenceFile.java

2012-06-21 Thread sradia
Author: sradia
Date: Thu Jun 21 21:44:36 2012
New Revision: 1352709

URL: http://svn.apache.org/viewvc?rev=1352709&view=rev
Log:
HADOOP-8486 Resource leak ... SequenceFile  (Fix with 
HADOOP-8486-branch-1-win-(5).patch) (Kanna Karanam via Sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1352709&r1=1352708&r2=1352709&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Thu Jun 21 21:44:36 2012
@@ -44,6 +44,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8486 Resource leak ... SequenceFile  (Kanna Karanam via Sanjay)
 
+HADOOP-8486 Resource leak ... SequenceFile  (Fix with 
HADOOP-8486-branch-1-win-(5).patch) (Kanna Karanam via Sanjay)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java?rev=1352709&r1=1352708&r2=1352709&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java
 Thu Jun 21 21:44:36 2012
@@ -56,7 +56,7 @@ public class TestSequenceFile extends Te
 new 
Path(System.getProperty("test.build.data",".")+"/nonSequenceFile.seq");
 SequenceFile.Reader reader = null;
 FileSystem fs = FileSystem.getLocal(conf);
-File f = new File(nonSeqFile.getUriPath());
+File f = new File(nonSeqFile.toUri().getPath());
 if(!f.exists()) {
   fs.createNewFile(nonSeqFile);
 }




svn commit: r1352390 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/core/org/apache/hadoop/io/SequenceFile.java src/test/org/apache/hadoop/io/TestSequenceFile.java

2012-06-20 Thread sradia
Author: sradia
Date: Thu Jun 21 01:44:22 2012
New Revision: 1352390

URL: http://svn.apache.org/viewvc?rev=1352390&view=rev
Log:
HADOOP-8486 Resource leak ... SequenceFile  (Kanna Karanam via Sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SequenceFile.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1352390&r1=1352389&r2=1352390&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Thu Jun 21 01:44:22 2012
@@ -42,6 +42,8 @@ branch-hadoop-1-win - unreleased
 MAPREDUCE-4203. Added an implementation of the process tree for Windows.
 (Bikas Saha via acmurthy) 
 
+HADOOP-8486 Resource leak ... SequenceFile  (Kanna Karanam via Sanjay)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SequenceFile.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SequenceFile.java?rev=1352390&r1=1352389&r2=1352390&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SequenceFile.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SequenceFile.java
 Thu Jun 21 01:44:22 2012
@@ -1414,7 +1414,7 @@ public class SequenceFile {
   /** Reads key/value pairs from a sequence-format file. */
   public static class Reader implements java.io.Closeable {
 private Path file;
-private FSDataInputStream in;
+private FSDataInputStream in = null;
 private DataOutputBuffer outBuf = new DataOutputBuffer();
 
 private byte version;
@@ -1465,8 +1465,8 @@ public class SequenceFile {
 private DataInputStream valIn = null;
 private Decompressor valDecompressor = null;
 
-private Deserializer keyDeserializer;
-private Deserializer valDeserializer;
+private Deserializer keyDeserializer = null;
+private Deserializer valDeserializer = null;
 
 /** Open the named file. */
 public Reader(FileSystem fs, Path file, Configuration conf)
@@ -1482,12 +1482,19 @@ public class SequenceFile {
 private Reader(FileSystem fs, Path file, int bufferSize, long start,
long length, Configuration conf, boolean tempReader) 
 throws IOException {
-  this.file = file;
-  this.in = openFile(fs, file, bufferSize, length);
-  this.conf = conf;
-  seek(start);
-  this.end = in.getPos() + length;
-  init(tempReader);
+  try {
+this.file = file;
+this.in = openFile(fs, file, bufferSize, length);
+this.conf = conf;
+seek(start);
+this.end = in.getPos() + length;
+init(tempReader);
+  } catch(IOException e) {
+// Close if there are any open resources before throwing exceptions
+// from constructor.
+close();
+throw e;
+  }
 }
 
 /**
@@ -1636,14 +1643,19 @@ public class SequenceFile {
   valLenDecompressor = valDecompressor = null;
   
   if (keyDeserializer != null) {
-   keyDeserializer.close();
+keyDeserializer.close();
+keyDeserializer = null;
   }
   if (valDeserializer != null) {
 valDeserializer.close();
+valDeserializer = null;
   }
   
   // Close the input-stream
-  in.close();
+  if(in != null) {
+in.close();
+in = null;
+  }
 }
 
 /** Returns the name of the key class. */

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java?rev=1352390&r1=1352389&r2=1352390&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/TestSequenceFile.java
 Thu Jun 21 01:44:22 2012
@@ -47,7 +47,34 @@ public class TestSequenceFile extends Te
 compressedSeqFileTest(new DefaultCodec());
 LOG.info("Successfully tested SequenceFile with DefaultCodec");
   }
-  
+
+  /* Test to ensure that file handle is closed properly when the 
+   * SequenceFile.Reader throws an exception from the constructor.*/
+  public void testReadNonSequenceFile() throws Exception {
+LOG.info("Testing SequenceFile Reader with non-sequence file");
+Path nonSeqF

svn commit: r1352385 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/winutils/chmod.c

2012-06-20 Thread sradia
Author: sradia
Date: Thu Jun 21 01:16:15 2012
New Revision: 1352385

URL: http://svn.apache.org/viewvc?rev=1352385&view=rev
Log:
HADOOP-8454 Fix the ‘chmod =[perm]’ bug in winutils (Chuan Liu via sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt
hadoop/common/branches/branch-1-win/src/winutils/chmod.c

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1352385&r1=1352384&r2=1352385&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Thu Jun 21 01:16:15 2012
@@ -37,6 +37,7 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8409 Fix TestCommandLineJobSubmission and TestGenericOptionsParser 
to work for windows (Ivan Mitic via Sanjay Radia) 
 
+HADOOP-8454 Fix the ‘chmod =[perm]’ bug in winutils (Chuan Liu via 
sanjay)
 
 Release 1.1.0 - unreleased
 

Modified: hadoop/common/branches/branch-1-win/src/winutils/chmod.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/winutils/chmod.c?rev=1352385&r1=1352384&r2=1352385&view=diff
==
--- hadoop/common/branches/branch-1-win/src/winutils/chmod.c (original)
+++ hadoop/common/branches/branch-1-win/src/winutils/chmod.c Thu Jun 21 
01:16:15 2012
@@ -102,10 +102,10 @@ static USHORT ComputeNewMode(__in USHORT
 // Function: Chmod
 //
 // Description:
-// The main method for chmod command
+//  The main method for chmod command
 //
 // Returns:
-// 0: on success
+//  0: on success
 //
 // Notes:
 //
@@ -169,11 +169,11 @@ ChmodEnd:
 // Function: ChangeFileMode
 //
 // Description:
-// Wrapper function for change file mode. Choose either change by action 
or by
+//  Wrapper function for change file mode. Choose either change by action or by
 //  access mask.
 //
 // Returns:
-// TRUE: on success
+//  TRUE: on success
 //  FALSE: otherwise
 //
 // Notes:
@@ -191,10 +191,10 @@ static BOOL ChangeFileMode(__in LPCWSTR 
 // Function: ChangeFileModeRecursively
 //
 // Description:
-// Travel the directory recursively to change the permissions.
+//  Travel the directory recursively to change the permissions.
 //
 // Returns:
-// TRUE: on success
+//  TRUE: on success
 //  FALSE: otherwise
 //
 // Notes:
@@ -235,10 +235,7 @@ static BOOL ChangeFileModeRecursively(__
  }
   }
 
-  // MAX_PATH is used here, because we use relative path, and relative
-  // paths are always limited to a total of MAX_PATH characters.
-  //
-  if (FAILED(StringCchLengthW(path, MAX_PATH - 3, &pathSize)))
+  if (FAILED(StringCchLengthW(path, STRSAFE_MAX_CCH - 3, &pathSize)))
   {
 return FALSE;
   }
@@ -315,10 +312,10 @@ ChangeFileModeRecursivelyEnd:
 // Function: ChangeFileModeByMask
 //
 // Description:
-// Change a file or direcotry at the path to Unix mode
+//  Change a file or direcotry at the path to Unix mode
 //
 // Returns:
-// TRUE: on success
+//  TRUE: on success
 //  FALSE: otherwise
 //
 // Notes:
@@ -466,10 +463,10 @@ ChangeFileMode:
 // Function: ParseCommandLineArguments
 //
 // Description:
-// Parse command line arguments for chmod.
+//  Parse command line arguments for chmod.
 //
 // Returns:
-// TRUE: on success
+//  TRUE: on success
 //  FALSE: otherwise
 //
 // Notes:
@@ -539,14 +536,14 @@ static BOOL ParseCommandLineArguments(__
 // Function: FreeActions
 //
 // Description:
-// Free a linked list of mode change actions given the head node.
+//  Free a linked list of mode change actions given the head node.
 //
 // Returns:
-// TRUE: on success
+//  TRUE: on success
 //  FALSE: otherwise
 //
 // Notes:
-// none
+//  none
 //
 static BOOL FreeActions(PMODE_CHANGE_ACTION actions)
 {
@@ -576,13 +573,13 @@ static BOOL FreeActions(PMODE_CHANGE_ACT
 // Function: ComputeNewMode
 //
 // Description:
-// Compute a new mode based on the old mode and a mode change action.
+//  Compute a new mode based on the old mode and a mode change action.
 //
 // Returns:
-// The newly computed mode
+//  The newly computed mode
 //
 // Notes:
-// Apply 'rwx' permission mask or reference permission mode according to 
the
+//  Apply 'rwx' permission mask or reference permission mode according to the
 //  '+', '-', or '=' operator.
 //
 static USHORT ComputeNewMode(__in USHORT oldMode,
@@ -596,11 +593,14 @@ static USHORT ComputeNewMode(__in USHORT
   USHORT mask = 0;
   USHORT mode = 0;
 
-  // Operation and reference mode are exclusive
+  // Operations are exclusive
   //
   assert(op == CHMOD_OP_EQUAL || op == CHMOD_OP_PLUS || op == CHMOD_OP_MINUS);
-  assert(ref == CHMOD_WHO_GROUP || ref == CHMOD_WHO_USER ||
-ref == CHMOD_WHO_OTHER);
+
+  // We should have only perm

svn commit: r1352377 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/test/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java src/test/org/apache/hadoop/util/TestGenericOptionsParser

2012-06-20 Thread sradia
Author: sradia
Date: Thu Jun 21 00:43:16 2012
New Revision: 1352377

URL: http://svn.apache.org/viewvc?rev=1352377&view=rev
Log:
HADOOP-8409 Fix TestCommandLineJobSubmission and TestGenericOptionsParser to 
work for windows (Ivan Mitic via Sanjay Radia) 

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestGenericOptionsParser.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1352377&r1=1352376&r2=1352377&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Thu Jun 21 00:43:16 2012
@@ -35,6 +35,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8440 HarFileSystem.decodeHarURI 2nd patch 
(HADOOP-8440-branch-1-win.2.patch) (Ivan Mitic via Sanjay Radia)
 
+HADOOP-8409 Fix TestCommandLineJobSubmission and TestGenericOptionsParser 
to work for windows (Ivan Mitic via Sanjay Radia) 
+
 
 Release 1.1.0 - unreleased
 

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java?rev=1352377&r1=1352376&r2=1352377&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
 Thu Jun 21 00:43:16 2012
@@ -57,7 +57,11 @@ public class TestCommandLineJobSubmissio
   stream.close();
   mr = new MiniMRCluster(2, fs.getUri().toString(), 1);
   File thisbuildDir = new File(buildDir, "jobCommand");
-  assertTrue("create build dir", thisbuildDir.mkdirs()); 
+  // Create the build dir only if it does not exist as mkdirs
+  // fails on Windows otherwise
+  if (!thisbuildDir.exists()) {
+assertTrue("create build dir", thisbuildDir.mkdirs());
+  }
   File f = new File(thisbuildDir, "files_tmp");
   FileOutputStream fstream = new FileOutputStream(f);
   fstream.write("somestrings".getBytes());
@@ -85,20 +89,21 @@ public class TestCommandLineJobSubmissio
 
   // construct options for -files
   String[] files = new String[3];
-  files[0] = f.toString();
-  files[1] = f1.toString() + "#localfilelink";
+  files[0] = f.toURI().toString();
+  files[1] = f1.toURI().toString() + "#localfilelink";
   files[2] = 
 fs.getUri().resolve(cachePath + "/test.txt#dfsfilelink").toString();
 
   // construct options for -libjars
   String[] libjars = new String[2];
-  libjars[0] = "build/test/testjar/testjob.jar";
+  libjars[0] = new File(System.getProperty("test.build.dir", "build/test"),
+"testjar/testjob.jar").toURI().toString();
   libjars[1] = fs.getUri().resolve(cachePath + "/test.jar").toString();
   
   // construct options for -archives
   String[] archives = new String[4];
-  archives[0] = tgzPath.toString();
-  archives[1] = tarPath + "#tarlink";
+  archives[0] = tgzPath.toUri().toString();
+  archives[1] = tarPath.toUri().toString() + "#tarlink";
   archives[2] = 
 fs.getUri().resolve(cachePath + "/test.zip#ziplink").toString();
   archives[3] = 
@@ -142,7 +147,10 @@ public class TestCommandLineJobSubmissio
 Configuration jobConf = mr.createJobConf();
 FileSystem fs = dfs.getFileSystem();
 Path dfsPath = new Path("/test/testjob.jar");
-fs.copyFromLocalFile(new Path("build/test/testjar/testjob.jar"), dfsPath);
+fs.copyFromLocalFile(
+new Path(System.getProperty("test.build.dir", "build/test"),
+ "testjar/testjob.jar"),
+dfsPath);
 String url = fs.getDefaultUri(jobConf).toString() + dfsPath.toString();
 String[] args = new String[6];
 args[0] = "-files";

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestGenericOptionsParser.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestGenericOptionsParser.java?rev=1352377&r1=1352376&r2=1352377&view=diff
==
--- 
hadoop/common/branches/branch-1-win/s

svn commit: r1352350 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/core/org/apache/hadoop/fs/HarFileSystem.java src/test/org/apache/hadoop/fs/TestHarFileSystem.java

2012-06-20 Thread sradia
Author: sradia
Date: Wed Jun 20 21:45:54 2012
New Revision: 1352350

URL: http://svn.apache.org/viewvc?rev=1352350&view=rev
Log:
HADOOP-8440 HarFileSystem.decodeHarURI 2nd patch 
(HADOOP-8440-branch-1-win.2.patch) (Ivan Mitic via Sanjay Radia)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestHarFileSystem.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1352350&r1=1352349&r2=1352350&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Wed Jun 20 21:45:54 2012
@@ -33,6 +33,9 @@ branch-hadoop-1-win - unreleased
 
 MAPREDUCE-4260 Use JobObject to spawn tasks on Windows (Bikas Saha via 
Sanjay)
 
+HADOOP-8440 HarFileSystem.decodeHarURI 2nd patch 
(HADOOP-8440-branch-1-win.2.patch) (Ivan Mitic via Sanjay Radia)
+
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java?rev=1352350&r1=1352349&r2=1352350&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java
 Wed Jun 20 21:45:54 2012
@@ -213,7 +213,8 @@ public class HarFileSystem extends Filte
   tmp = new URI(baseUri.getScheme(), baseUri.getAuthority(),
 rawURI.getPath(), rawURI.getQuery(), rawURI.getFragment());
 } catch (URISyntaxException e) {
-// do nothing should not happen
+  throw new IOException("URI: " + rawURI
+  + " is an invalid Har URI. Expecting har://-/.");
 }
 return tmp;
   }

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1352350&r1=1352349&r2=1352350&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
 Wed Jun 20 21:45:54 2012
@@ -170,7 +170,29 @@ public class TestHarFileSystem extends T
   assertTrue(status.getBlockSize() == blockSize);
 }
   }
-  
+
+  public void testHarUri() {
+final Configuration conf = new Configuration();
+checkInvalidPath("har://hdfs-/foo.har", conf);
+checkInvalidPath("har://hdfs/foo.har", conf);
+checkInvalidPath("har://-hdfs/foo.har", conf);
+checkInvalidPath("har://-/foo.har", conf);
+checkInvalidPath("har://127.0.0.1-/foo.har", conf);
+checkInvalidPath("har://127.0.0.1/foo.har", conf);
+  }
+
+  static void checkInvalidPath(String s, Configuration conf) {
+System.out.println("\ncheckInvalidPath: " + s);
+final Path p = new Path(s);
+try {
+  p.getFileSystem(conf);
+  assertTrue(p + " is an invalid path.", false);
+} catch (IOException e) {
+  System.out.println("GOOD: Got an exception.");
+  e.printStackTrace(System.out);
+}
+  }
+
   // test archives with a -p option
   public void testRelativeArchives() throws Exception {
 fs.delete(archivePath, true);




svn commit: r1350835 - in /hadoop/common/branches/branch-1-win: ./ src/core/org/apache/hadoop/util/ src/mapred/ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/mapred/ src/winutils/

2012-06-15 Thread sradia
Author: sradia
Date: Sat Jun 16 00:33:15 2012
New Revision: 1350835

URL: http://svn.apache.org/viewvc?rev=1350835&view=rev
Log:
MAPREDUCE-4260 Use JobObject to spawn tasks on Windows (Bikas Saha via Sanjay)

Added:
hadoop/common/branches/branch-1-win/src/winutils/task.c
Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java
hadoop/common/branches/branch-1-win/src/mapred/mapred-default.xml

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/Child.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/DefaultTaskController.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/JobConf.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/JvmManager.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskController.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestKillSubProcesses.java
hadoop/common/branches/branch-1-win/src/winutils/common.h
hadoop/common/branches/branch-1-win/src/winutils/main.c
hadoop/common/branches/branch-1-win/src/winutils/winutils.vcxproj

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1350835&r1=1350834&r2=1350835&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Sat Jun 16 00:33:15 2012
@@ -31,6 +31,8 @@ branch-hadoop-1-win - unreleased
 MAPREDUCE-4321. Fix both DCE & LCE to use File.getAbsolutePath of the
 taskjvm.(sh,cmd) to ensure it works on Windows. (Ivan Mitic via acmurthy)
 
+MAPREDUCE-4260 Use JobObject to spawn tasks on Windows (Bikas Saha via 
Sanjay)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java?rev=1350835&r1=1350834&r2=1350835&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java
 Sat Jun 16 00:33:15 2012
@@ -49,23 +49,49 @@ public class ProcessTree {
 }
   }
 
-  public static final boolean isSetsidAvailable = isSetsidSupported();
-  private static boolean isSetsidSupported() {
-if (Shell.DISABLEWINDOWS_TEMPORARILY)
-  return false;
-ShellCommandExecutor shexec = null;
-boolean setsidSupported = true;
-try {
-  String[] args = {"setsid", "bash", "-c", "echo $$"};
-  shexec = new ShellCommandExecutor(args);
-  shexec.execute();
-} catch (IOException ioe) {
-  LOG.warn("setsid is not available on this machine. So not using it.");
-  setsidSupported = false;
-} finally { // handle the exit code
-  LOG.info("setsid exited with exit code " + shexec.getExitCode());
+  // TODO rename isSetsidAvailable after merge of branch-1-win (MAPREDUCE-4325)
+  public static boolean isSetsidAvailable = isProcessGroupSupported();
+  private static boolean isProcessGroupSupported() {
+boolean processGroupSupported = true;
+if (Shell.WINDOWS) {
+  ShellCommandExecutor shexec = null;
+  try {
+String args[] = {Shell.WINUTILS};
+shexec = new ShellCommandExecutor(args);
+shexec.execute();
+  } catch (IOException e) {
+  } finally {
+String result = shexec.getOutput();
+if (result == null
+|| !result.contains("Creates a new task jobobject with taskname")) 
{
+  processGroupSupported = false;
+}
+  }
 }
-return setsidSupported;
+else {
+  ShellCommandExecutor shexec = null;
+  try {
+String[] args = {"setsid", "bash", "-c", "echo $$"};
+shexec = new ShellCommandExecutor(args);
+shexec.execute();
+  } catch (IOException ioe) {
+LOG.warn("setsid is not available on this machine. So not using it.");
+processGroupSupported = false;
+  } finally { // handle the exit code
+LOG.info("setsid exited with exit code " + shexec.getExitCode());
+  }
+}
+if(processGroupSupported) {
+  LOG.info("Platform supports pro

svn commit: r1346702 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java src/test/org/apache/hadoop/hdfs/TestReplication.java

2012-06-05 Thread sradia
Author: sradia
Date: Wed Jun  6 01:53:58 2012
New Revision: 1346702

URL: http://svn.apache.org/viewvc?rev=1346702&view=rev
Log:
HDFS-3424 TestDatanodeBlockScanner and TestReplication fail intermittently on 
Windows (Bikas Saha via Sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestReplication.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1346702&r1=1346701&r2=1346702&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Wed Jun  6 01:53:58 2012
@@ -27,6 +27,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8374 Improve support for hard link manipulation on Windows (Bikas 
Saha via Sanjay)
 
+HDFS-3424 TestDatanodeBlockScanner and TestReplication fail intermittently 
on Windows (Bikas Saha via Sanjay)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java?rev=1346702&r1=1346701&r2=1346702&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java
 Wed Jun  6 01:53:58 2012
@@ -388,26 +388,31 @@ public class TestDatanodeBlockScanner ex
 final short REPLICATION_FACTOR = (short)2;
 
 MiniDFSCluster cluster = new MiniDFSCluster(conf, REPLICATION_FACTOR, 
true, null);
-cluster.waitActive();
-FileSystem fs = cluster.getFileSystem();
+String block = null;
+final Path fileName = new Path("/file1");
 try {
-  final Path fileName = new Path("/file1");
+  cluster.waitActive();
+  FileSystem fs = cluster.getFileSystem();
   DFSTestUtil.createFile(fs, fileName, 1, REPLICATION_FACTOR, 0);
   DFSTestUtil.waitReplication(fs, fileName, REPLICATION_FACTOR);
 
-  String block = DFSTestUtil.getFirstBlock(fs, fileName).getBlockName();
-
-  // Truncate replica of block
-  changeReplicaLength(block, 0, -1);
+  block = DFSTestUtil.getFirstBlock(fs, fileName).getBlockName();
 
+} finally {
   cluster.shutdown();
+}
+
+// Truncate replica of block
+changeReplicaLength(block, 0, -1);
 
-  // restart the cluster
-  cluster = new MiniDFSCluster(
-  0, conf, REPLICATION_FACTOR, false, true, null, null, null);
+// restart the cluster
+cluster = new MiniDFSCluster(
+0, conf, REPLICATION_FACTOR, false, true, null, null, null);
+
+try {
   cluster.startDataNodes(conf, 1, true, null, null);
   cluster.waitActive();  // now we have 3 datanodes
-
+  
   // wait for truncated block be detected and the block to be replicated
   DFSTestUtil.waitReplication(
   cluster.getFileSystem(), fileName, REPLICATION_FACTOR);
@@ -425,7 +430,7 @@ public class TestDatanodeBlockScanner ex
   static boolean changeReplicaLength(String blockName, int dnIndex, int 
lenDelta) throws IOException {
 File baseDir = new File(System.getProperty("test.build.data"), "dfs/data");
for (int i=dnIndex*2; i&lt;dnIndex*2+2; i++) {

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestReplication.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestReplication.java?rev=1346702&r1=1346701&r2=1346702&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestReplication.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestReplication.java
 Wed Jun  6 01:53:58 2012
@@ -396,20 +396,22 @@ public class TestReplication extends Tes
* @throws Exception
*/
   public void testReplicateLenMismatchedBlock() throws Exception {
-MiniDFSCluster cluster = new MiniDFSCluster(new Configuration(), 2, true, 
null);
+Configuration conf = new Configuration();
+int numDN = 2;
+MiniDFSCluster cluster = new MiniDFSCluster(conf, numDN, true, null);
 try {
   cluster.waitActive();
   // test truncated block
-  changeBlockLen(cluster, -1);
+  changeBlockLen(cluster, -1, conf, numDN);
   // test extended block
-  changeBlockLen(cluster, 1);
+  changeBlockLen(cluster, 1, conf, numDN);
 } finally {
   cluster.shutdown();
 }
   }
   
   private void changeBlockLen(MiniDF

svn commit: r1344963 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/core/org/apache/hadoop/fs/HardLink.java src/test/org/apache/hadoop/fs/TestHardLink.java

2012-05-31 Thread sradia
Author: sradia
Date: Fri Jun  1 01:34:35 2012
New Revision: 1344963

URL: http://svn.apache.org/viewvc?rev=1344963&view=rev
Log:
HADOOP-8374 Improve support for hard link manipulation on Windows (Bikas Saha 
via Sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HardLink.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestHardLink.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1344963&r1=1344962&r2=1344963&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Fri Jun  1 01:34:35 2012
@@ -25,6 +25,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8440 HarFileSystem.decodeHarURI fails for URIs whose host contains 
numbers  (Ivan Mitic via Sanjay Radia)
 
+HADOOP-8374 Improve support for hard link manipulation on Windows (Bikas 
Saha via Sanjay)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HardLink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HardLink.java?rev=1344963&r1=1344962&r2=1344963&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HardLink.java 
(original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HardLink.java 
Fri Jun  1 01:34:35 2012
@@ -25,9 +25,11 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.util.Arrays;
 
+import org.apache.hadoop.util.Shell;
+
 /**
  * Class for creating hardlinks.
- * Supports Unix/Linux, WinXP/2003/Vista via Cygwin, and Mac OS X.
+ * Supports Unix/Linux, Windows via winutils , and Mac OS X.
  * 
  * The HardLink class was formerly a static inner class of FSUtil,
  * and the methods provided were blatantly non-thread-safe.
@@ -41,7 +43,7 @@ public class HardLink { 
 
   public enum OSType {
 OS_TYPE_UNIX,
-OS_TYPE_WINXP,
+OS_TYPE_WIN,
 OS_TYPE_SOLARIS,
 OS_TYPE_MAC
   }
@@ -55,7 +57,7 @@ public class HardLink { 
   //methods without instantiating the HardLink object
   static { 
 osType = getOSType();
-if (osType == OSType.OS_TYPE_WINXP) {
+if (osType == OSType.OS_TYPE_WIN) {
   // Windows
   getHardLinkCommand = new HardLinkCGWin();
 } else {
@@ -79,14 +81,8 @@ public class HardLink { 
   
   static private OSType getOSType() {
 String osName = System.getProperty("os.name");
-if (osName.contains("Windows") &&
-(osName.contains("XP") 
-|| osName.contains("2003") 
-|| osName.contains("Vista")
-|| osName.contains("Windows_7")
-|| osName.contains("Windows 7") 
-|| osName.contains("Windows7"))) {
-  return OSType.OS_TYPE_WINXP;
+if (Shell.WINDOWS) {
+  return OSType.OS_TYPE_WIN;
 }
 else if (osName.contains("SunOS") 
 || osName.contains("Solaris")) {
@@ -254,11 +250,6 @@ public class HardLink { 
   
   /**
* Implementation of HardLinkCommandGetter class for Windows
-   * 
-   * Note that the linkCount shell command for Windows is actually
-   * a Cygwin shell command, and depends on ${cygwin}/bin
-   * being in the Windows PATH environment variable, so
-   * stat.exe can be found.
*/
   static class HardLinkCGWin extends HardLinkCommandGetter {
 //The Windows command getter impl class and its member fields are
@@ -266,14 +257,16 @@ public class HardLink { 
 //unit testing (sort of) on non-Win servers
 
 static String[] hardLinkCommand = {
-"fsutil","hardlink","create", null, null};
+Shell.WINUTILS,"hardlink","create", null, null};
 static String[] hardLinkMultPrefix = {
 "cmd","/q","/c","for", "%f", "in", "("};
 static String   hardLinkMultDir = "\\%f";
 static String[] hardLinkMultSuffix = {
-")", "do", "fsutil", "hardlink", "create", null, 
+")", "do", Shell.WINUTILS, "hardlink", "create", null,
 "%f", "1>NUL"};
-static String[] getLinkCountCommand = {"stat","-c%h", null};
+static String[] getLinkCountCommand = {
+Shell.WIN

svn commit: r1344959 - in /hadoop/common/branches/branch-1-win: CHANGES.txt src/core/org/apache/hadoop/fs/HarFileSystem.java

2012-05-31 Thread sradia
Author: sradia
Date: Fri Jun  1 01:20:23 2012
New Revision: 1344959

URL: http://svn.apache.org/viewvc?rev=1344959&view=rev
Log:
HADOOP-8440 HarFileSystem.decodeHarURI fails for URIs whose host contains 
numbers  (Ivan Mitic via Sanjay Radia)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1344959&r1=1344958&r2=1344959&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Fri Jun  1 01:20:23 2012
@@ -23,6 +23,8 @@ branch-hadoop-1-win - unreleased
HADOOP-8411 TestStorageDirectoryFailure, TestTaskLogsTruncater, 
TestWebHdfsUrl and TestSecurityUtil
 fail on Windows  (Ivan Mitic via Sanjay Radia)
 
+HADOOP-8440 HarFileSystem.decodeHarURI fails for URIs whose host contains 
numbers  (Ivan Mitic via Sanjay Radia)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java?rev=1344959&r1=1344958&r2=1344959&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java
 Fri Jun  1 01:20:23 2012
@@ -185,32 +185,33 @@ public class HarFileSystem extends Filte
   //create a path 
   return FileSystem.getDefaultUri(conf);
 }
-String host = rawURI.getHost();
-if (host == null) {
+String authority = rawURI.getAuthority();
+if (authority == null) {
   throw new IOException("URI: " + rawURI
-  + " is an invalid Har URI since host==null."
+  + " is an invalid Har URI since authority==null."
   + "  Expecting har://-/.");
 }
-int i = host.indexOf('-');
+
+int i = authority.indexOf('-');
 if (i < 0) {
   throw new IOException("URI: " + rawURI
   + " is an invalid Har URI since '-' not found."
   + "  Expecting har://-/.");
 }
-final String underLyingScheme = host.substring(0, i);
-i++;
-final String underLyingHost = i == host.length()? null: host.substring(i);
-int underLyingPort = rawURI.getPort();
-String auth = (underLyingHost == null && underLyingPort == -1)?
-  null:(underLyingHost+":"+underLyingPort);
-URI tmp = null;
+
 if (rawURI.getQuery() != null) {
   // query component not allowed
   throw new IOException("query component in Path not supported  " + 
rawURI);
 }
+
+URI tmp = null;
+
 try {
-  tmp = new URI(underLyingScheme, auth, rawURI.getPath(), 
-rawURI.getQuery(), rawURI.getFragment());
+  // convert - to ://
+  URI baseUri = new URI(authority.replaceFirst("-", "://"));
+
+  tmp = new URI(baseUri.getScheme(), baseUri.getAuthority(),
+rawURI.getPath(), rawURI.getQuery(), rawURI.getFragment());
 } catch (URISyntaxException e) {
 // do nothing should not happen
 }




svn commit: r1344955 - in /hadoop/common/branches/branch-1-win: ./ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/hdfs/server/namenode/ src/test/org/apache/hadoop/hdfs/web/ src/test/o

2012-05-31 Thread sradia
Author: sradia
Date: Fri Jun  1 01:04:40 2012
New Revision: 1344955

URL: http://svn.apache.org/viewvc?rev=1344955&view=rev
Log:
HADOOP-8411 TestStorageDirectoryFailure, TestTaskLogsTruncater, TestWebHdfsUrl 
and TestSecurityUtil fail on Windows  (Ivan Mitic via Sanjay Radia)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestSecurityUtil.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1344955&r1=1344954&r2=1344955&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Fri Jun  1 01:04:40 2012
@@ -20,6 +20,9 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8235 Support file permissions and ownership on Windows for 
RawLocalFileSystem  (Chuan Liu via sanjay)
 
+HADOOP-8411 TestStorageDirectoryFailure, TestTaskLogsTruncater, 
TestWebHdfsUrl and TestSecurityUtil
+fail on Windows  (Ivan Mitic via Sanjay Radia)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java?rev=1344955&r1=1344954&r2=1344955&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java
 Fri Jun  1 01:04:40 2012
@@ -46,6 +46,7 @@ import org.apache.hadoop.mapred.TaskLog.
 import org.apache.hadoop.mapred.TaskLog.LogFileDetail;
 import org.apache.hadoop.mapreduce.server.tasktracker.JVMInfo;
 import org.apache.hadoop.mapreduce.server.tasktracker.userlogs.UserLogManager;
+import org.apache.hadoop.util.Shell;
 
 /**
  * The class for truncating the user logs. 
@@ -284,7 +285,7 @@ public class TaskLogsTruncater {
   // // End of closing the file streams 
 
   // // Commit the changes from tmp file to the logFile 
-  if (!tmpFile.renameTo(logFile)) {
+  if (!renameAtomicWithOverride(tmpFile, logFile)) {
 // If the tmpFile cannot be renamed revert back
 // updatedTaskLogFileDetails to maintain the consistency of the
 // original log file
@@ -305,6 +306,43 @@ public class TaskLogsTruncater {
   }
 
   /**
+   * Renames a source file into a target file. Overrides the target file
+   * if it already exists.
+   */
+  private boolean renameAtomicWithOverride(File source, File target) {
+if (Shell.WINDOWS && target.exists()) {
+  // Rename into the existing file fails on Windows, hence we have to
+  // rename using a temp file to provide some level of atomic behavior
+  // (and revert changes back if something fails)
+  File tmpTargetFile = new File(target.getPath() + ".tmp");
+  if (target.renameTo(tmpTargetFile)) {
+if (source.renameTo(target)) {
+  // Rename succeeded, try to delete the target backup
+  if (!tmpTargetFile.delete()) {
+LOG.warn("Cannot delete tmpTargetFile "
+ + tmpTargetFile.getAbsolutePath());
+tmpTargetFile.deleteOnExit();
+  }
+  // Return true anyways as we successfully renamed source
+  // into target
+  return true;
+} else {
+  // Revert back the original
+  if (!tmpTargetFile.renameTo(target)) {
+LOG.error("Cannot revert back the original log file "
+ + target.getAbsolutePath());
+  }
+  return false;
+}
+  } else {
+return false;
+  }
+} else {
+  return source.renameTo(target);
+}
+  }
+
+  /**
* @param lInfo
* @param taskLogFileDetails
* @param updatedTaskLogFileDetails

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java?rev=1344955&r1=1344954&r2=1344955&view=diff
==
--- 
hadoo

svn commit: r1344527 [2/2] - in /hadoop/common/branches/branch-1-win: ./ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/security/ src/core/org/apache/hadoop/util/ src/mapred/org/apache/hado

2012-05-30 Thread sradia
Added: hadoop/common/branches/branch-1-win/src/winutils/common.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/winutils/common.c?rev=1344527&view=auto
==
--- hadoop/common/branches/branch-1-win/src/winutils/common.c (added)
+++ hadoop/common/branches/branch-1-win/src/winutils/common.c Thu May 31 
01:52:15 2012
@@ -0,0 +1,613 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations 
under
+ * the License.
+ */
+
+#pragma comment(lib, "authz.lib")
+#include "common.h"
+#include 
+/*
+ * The array of 12 months' three-letter abbreviations 
+ */
+const LPCWSTR MONTHS[] = { L"Jan", L"Feb", L"Mar", L"Apr", L"May", L"Jun",
+  L"Jul", L"Aug", L"Sep", L"Oct", L"Nov", L"Dec" };
+
+/*
+ * The WindowsAclMask and WinMasks contain the definitions used to establish
+ * the mapping between Unix and Windows.
+ * We set up the mapping with the following rules. 
+ *   1. Everyone will have WIN_ALL permissions;
+ *   2. Owner will always have WIN_OWNER_SE permissions in addition;
+ *   2. When Unix read/write/excute permission is set on the file, the
+ *  corresponding Windows allow ACE will be added to the file.
+ * More details and explaination can be found in the following white paper:
+ *   http://technet.microsoft.com/en-us/library/bb463216.aspx
+ */
+const ACCESS_MASK WinMasks[WIN_MASKS_TOTAL] =
+{
+  /* WIN_READ */
+  FILE_READ_DATA,
+  /* WIN_WRITE */
+  FILE_WRITE_DATA | FILE_WRITE_ATTRIBUTES | FILE_APPEND_DATA | FILE_WRITE_EA |
+  FILE_DELETE_CHILD,
+  /* WIN_EXECUTE */
+  FILE_EXECUTE,
+  /* WIN_OWNER_SE */
+  DELETE | WRITE_DAC | WRITE_OWNER | FILE_WRITE_EA | FILE_WRITE_ATTRIBUTES, 
+  /* WIN_ALL */
+  READ_CONTROL |  FILE_READ_EA | FILE_READ_ATTRIBUTES | SYNCHRONIZE,
+};
+
+//
+// Function: GetFileInformationByName
+//
+// Description:
+//  To retrieve the by handle file information given the file name
+//
+// Returns:
+//  ERROR_SUCCESS: on success
+//  error code: otherwise
+//
+// Notes:
+//
+DWORD GetFileInformationByName(
+  __in LPCWSTR pathName,
+  __out LPBY_HANDLE_FILE_INFORMATION lpFileInformation)
+{
+  HANDLE fileHandle = NULL;
+  DWORD dwErrorCode = ERROR_SUCCESS;
+
+  assert(lpFileInformation != NULL);
+
+  fileHandle = CreateFileW(
+pathName,
+FILE_READ_ATTRIBUTES,
+FILE_SHARE_READ,
+NULL,
+OPEN_EXISTING,
+FILE_ATTRIBUTE_NORMAL | FILE_FLAG_BACKUP_SEMANTICS,
+NULL);
+  if (fileHandle == INVALID_HANDLE_VALUE)
+  {
+dwErrorCode = GetLastError();
+return dwErrorCode;
+  }
+
+  if (!GetFileInformationByHandle(fileHandle, lpFileInformation))
+  {
+dwErrorCode = GetLastError();
+CloseHandle(fileHandle);
+return dwErrorCode;
+  }
+
+  CloseHandle(fileHandle);
+
+  return dwErrorCode;
+}
+
+//
+// Function: IsLongWindowsPath
+//
+// Description:
+//  Checks if the path is longer than MAX_PATH in which case it needs to be
+//  prepended with \\?\ for Windows OS to understand it.
+//
+// Returns:
+//  TRUE long path
+//  FALSE otherwise
+static BOOL IsLongWindowsPath(__in PCWSTR path)
+{
+  return (wcslen(path) + 1) > MAX_PATH;
+}
+
+//
+// Function: ConvertToLongPath
+//
+// Description:
+//  Prepends the path with the \\?\ prefix if the path is longer than MAX_PATH.
+//  On success, newPath should be freed with LocalFree(). Given that relative
+//  paths cannot be longer than MAX_PATH, we will never prepend the prefix
+//  to relative paths.
+//
+// Returns:
+//  ERROR_SUCCESS on success
+//  error code on failure
+DWORD ConvertToLongPath(__in PCWSTR path, __deref_out PWSTR *newPath)
+{
+  DWORD dwErrorCode = ERROR_SUCCESS;
+  static const PCWSTR LongPathPrefix = L"?\\";
+  BOOL bAppendPrefix = IsLongWindowsPath(path);
+
+  size_t newPathLen = wcslen(path) + (bAppendPrefix ? wcslen(LongPathPrefix) : 
0);
+
+  // Allocate the buffer for the output path (+1 for terminating NULL char)
+  //
+  PWSTR newPathValue = (PWSTR)LocalAlloc(LPTR, (newPathLen + 1) * 
sizeof(WCHAR));
+  if (newPathValue == NULL)
+  {
+   

svn commit: r1342051 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java src/main/proto/hadoop_rpc.proto

2012-05-23 Thread sradia
Author: sradia
Date: Wed May 23 21:16:39 2012
New Revision: 1342051

URL: http://svn.apache.org/viewvc?rev=1342051&view=rev
Log:
HADOOP-8367 Improve documentation of declaringClassProtocolName in rpc headers 
(Sanjay Radia)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1342051&r1=1342050&r2=1342051&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed May 
23 21:16:39 2012
@@ -70,6 +70,9 @@ Trunk (unreleased changes)
 HADOOP-8360. empty-configuration.xml fails xml validation
 (Radim Kolar via harsh)
 
+HADOOP-8367 Improve documentation of declaringClassProtocolName in rpc 
headers 
+(Sanjay Radia)
+
   BUG FIXES
 
 HADOOP-8177. MBeans shouldn't try to register when it fails to create 
MBeanName.

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1342051&r1=1342050&r2=1342051&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 Wed May 23 21:16:39 2012
@@ -396,24 +396,44 @@ public class ProtobufRpcEngine implement
* it is.
* 
*/
-  public Writable call(RPC.Server server, String protocol,
+  public Writable call(RPC.Server server, String connectionProtocolName,
   Writable writableRequest, long receiveTime) throws Exception {
 RpcRequestWritable request = (RpcRequestWritable) writableRequest;
 HadoopRpcRequestProto rpcRequest = request.message;
 String methodName = rpcRequest.getMethodName();
-String protoName = rpcRequest.getDeclaringClassProtocolName();
+
+
+/** 
+ * RPCs for a particular interface (i.e. protocol) are done using an
+ * IPC connection that is set up using rpcProxy.
+ * The rpcProxy has a declared protocol name that is 
+ * sent from client to server at connection time. 
+ * 
+ * Each Rpc call also sends a protocol name 
+ * (called declaringClassProtocolName). This name is usually the same
+ * as the connection protocol name except in some cases. 
+ * For example metaProtocols such as ProtocolInfoProto, which get info
+ * about the protocol, reuse the connection but need to indicate that
+ * the actual protocol is different (i.e. the protocol is
+ * ProtocolInfoProto) since they reuse the connection; in this case
+ * the declaringClassProtocolName field is set to the 
ProtocolInfoProto.
+ */
+
+String declaringClassProtoName = 
+rpcRequest.getDeclaringClassProtocolName();
 long clientVersion = rpcRequest.getClientProtocolVersion();
 if (server.verbose)
-  LOG.info("Call: protocol=" + protocol + ", method=" + methodName);
+  LOG.info("Call: connectionProtocolName=" + connectionProtocolName + 
+  ", method=" + methodName);
 
-ProtoClassProtoImpl protocolImpl = getProtocolImpl(server, protoName,
-clientVersion);
+ProtoClassProtoImpl protocolImpl = getProtocolImpl(server, 
+  declaringClassProtoName, clientVersion);
 BlockingService service = (BlockingService) protocolImpl.protocolImpl;
 MethodDescriptor methodDescriptor = service.getDescriptorForType()
 .findMethodByName(methodName);
 if (methodDescriptor == null) {
-  String msg = "Unknown method " + methodName + " called on " + 
protocol
-  + " protocol.";
+  String msg = "Unknown method " + methodName + " called on " 
++ connectionProtocolName + " protocol.";
   LOG.warn(msg);
   throw new RpcServerException(msg);
 }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/hadoo

svn commit: r1341330 - in /hadoop/common/branches/branch-1-win: ./ src/test/org/apache/hadoop/hdfs/ src/test/org/apache/hadoop/mapreduce/security/token/delegation/ src/test/org/apache/hadoop/security/

2012-05-22 Thread sradia
Author: sradia
Date: Tue May 22 07:23:55 2012
New Revision: 1341330

URL: http://svn.apache.org/viewvc?rev=1341330&view=rev
Log:
HADOOP-8412 TestModTime, TestDelegationToken and TestAuthenticationToken 
fail
intermittently on Windows  (Ivan Mitic via Sanjay Radia)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestModTime.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapreduce/security/token/delegation/TestDelegationToken.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestAuthenticationToken.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1341330&r1=1341329&r2=1341330&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Tue May 22 07:23:55 2012
@@ -15,6 +15,9 @@ branch-hadoop-1-win - unreleased
 
 MAPREDUCE-4204 Refactor ProcfsBasedProcessTree to make the resource 
collection object pluggable (Bikas Saha via Sanjay)
 
+HADOOP-8412 TestModTime, TestDelegationToken and TestAuthenticationToken 
fail
+intermittently on Windows  (Ivan Mitic via Sanjay Radia)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestModTime.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestModTime.java?rev=1341330&r1=1341329&r2=1341330&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestModTime.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestModTime.java
 Tue May 22 07:23:55 2012
@@ -73,8 +73,9 @@ public class TestModTime extends TestCas
 
   /**
* Tests modification time in DFS.
+   * @throws InterruptedException 
*/
-  public void testModTime() throws IOException {
+  public void testModTime() throws IOException, InterruptedException {
 Configuration conf = new Configuration();
 
 MiniDFSCluster cluster = new MiniDFSCluster(conf, numDatanodes, true, 
null);
@@ -134,6 +135,8 @@ public class TestModTime extends TestCas
  //
  Path newfile = new Path(dir2, "testnew.dat");
  System.out.println("Moving " + file1 + " to " + newfile);
+ // Sleep for a while to make sure modification time changes
+ Thread.sleep(20);
  fileSys.rename(file1, newfile);
  //
  // verify that modification time of file1 did not change.
@@ -155,6 +158,7 @@ public class TestModTime extends TestCas
  // delete newfile
  //
  System.out.println("Deleting testdir2/testnew.dat.");
+ Thread.sleep(20);
  assertTrue(fileSys.delete(newfile, true));
  //
  // verify that modification time of testdir1 has not changed.

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapreduce/security/token/delegation/TestDelegationToken.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapreduce/security/token/delegation/TestDelegationToken.java?rev=1341330&r1=1341329&r2=1341330&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapreduce/security/token/delegation/TestDelegationToken.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapreduce/security/token/delegation/TestDelegationToken.java
 Tue May 22 07:23:55 2012
@@ -86,7 +86,7 @@ public class TestDelegationToken {
 System.out.println("create time: " + createTime);
 System.out.println("current time: " + currentTime);
 System.out.println("max time: " + maxTime);
-assertTrue("createTime < current", createTime < currentTime);
+assertTrue("createTime <= current", createTime <= currentTime);
 assertTrue("current < maxTime", currentTime < maxTime);
 client.renewDelegationToken(token);
 client.renewDelegationToken(token);

Modified: 
hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestAuthenticationToken.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestAuthenticationToken.java?rev=1341330&r1=1341329&r2=1341330&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/test/org/apache/

svn commit: r1337283 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/main/proto/

2012-05-11 Thread sradia
Author: sradia
Date: Fri May 11 16:56:52 2012
New Revision: 1337283

URL: http://svn.apache.org/viewvc?rev=1337283&view=rev
Log:
HADOOP-8366 Use ProtoBuf for RpcResponseHeader (sanjay radia)

Removed:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Status.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1337283&r1=1337282&r2=1337283&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri May 
11 16:56:52 2012
@@ -67,6 +67,8 @@ Trunk (unreleased changes)
 
 HADOOP-8308. Support cross-project Jenkins builds. (tomwhite)
 
+HADOOP-8366 Use ProtoBuf for RpcResponseHeader (sanjay radia)
+
   BUG FIXES
 
 HADOOP-8177. MBeans shouldn't try to register when it fails to create 
MBeanName.

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1337283&r1=1337282&r2=1337283&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Fri May 11 16:56:52 2012
@@ -53,6 +53,8 @@ import org.apache.hadoop.fs.CommonConfig
 import 
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import 
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;
 import 
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto;
+import 
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto;
+import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
@@ -845,24 +847,24 @@ public class Client {
   touch();
   
   try {
-int id = in.readInt();// try to read an id
-
+RpcResponseHeaderProto response = 
+RpcResponseHeaderProto.parseDelimitedFrom(in);
+int callId = response.getCallId();
 if (LOG.isDebugEnabled())
-  LOG.debug(getName() + " got value #" + id);
-
-Call call = calls.get(id);
+  LOG.debug(getName() + " got value #" + callId);
 
-int state = in.readInt(); // read call status
-if (state == Status.SUCCESS.state) {
+Call call = calls.get(callId);
+RpcStatusProto status = response.getStatus();
+if (status == RpcStatusProto.SUCCESS) {
   Writable value = ReflectionUtils.newInstance(valueClass, conf);
   value.readFields(in); // read value
   call.setRpcResponse(value);
-  calls.remove(id);
-} else if (state == Status.ERROR.state) {
+  calls.remove(callId);
+} else if (status == RpcStatusProto.ERROR) {
   call.setException(new RemoteException(WritableUtils.readString(in),
 WritableUtils.readString(in)));
-  calls.remove(id);
-} else if (state == Status.FATAL.state) {
+  calls.remove(callId);
+} else if (status == RpcStatusProto.FATAL) {
   // Close the connection
   markClosed(new RemoteException(WritableUtils.readString(in), 
  WritableUtils.readString(in)));

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1337283&r1=1337282&r2=1337283&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/S

svn commit: r1331645 - in /hadoop/common/branches/branch-1-win: ./ src/core/org/apache/hadoop/util/ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/u

2012-04-27 Thread sradia
Author: sradia
Date: Sat Apr 28 01:00:04 2012
New Revision: 1331645

URL: http://svn.apache.org/viewvc?rev=1331645&view=rev
Log:
MAPREDUCE-4204 Refactor ProcfsBasedProcessTree to make the resource 
collection object pluggable (Bikas Saha via Sanjay)

Added:

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ResourceCalculatorProcessTree.java
Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/LinuxResourceCalculatorPlugin.java

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskMemoryManagerThread.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestProcfsBasedProcessTree.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1331645&r1=1331644&r2=1331645&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Sat Apr 28 01:00:04 2012
@@ -7,11 +7,13 @@ Hadoop Change Log
 branch-hadoop-1-win - unreleased
   IMPROVEMENTS
 
-HADOOP-8223 - Initial patch for branch-1-win (David Lao via Sanjay)
+HADOOP-8223 Initial patch for branch-1-win (David Lao via Sanjay)
 
-HADOOP-8234 - Enable user group mappings on Windows (Bikas Saha via Sanjay)
+HADOOP-8234 Enable user group mappings on Windows (Bikas Saha via Sanjay)
 
-MAPREDUCE-4201 - Getting PID not working on Windows. Termination of 
Task/TaskJVM's not working (Bikas Saha via Sanjay)
+MAPREDUCE-4201 Getting PID not working on Windows. Termination of 
Task/TaskJVM's not working (Bikas Saha via Sanjay)
+
+MAPREDUCE-4204 Refactor ProcfsBasedProcessTree to make the resource 
collection object pluggable (Bikas Saha via Sanjay)
 
 Release 1.1.0 - unreleased
 

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/LinuxResourceCalculatorPlugin.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/LinuxResourceCalculatorPlugin.java?rev=1331645&r1=1331644&r2=1331645&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/LinuxResourceCalculatorPlugin.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/LinuxResourceCalculatorPlugin.java
 Sat Apr 28 01:00:04 2012
@@ -86,7 +86,7 @@ public class LinuxResourceCalculatorPlug
   private float cpuUsage = UNAVAILABLE;
   private long sampleTime = UNAVAILABLE;
   private long lastSampleTime = UNAVAILABLE;
-  private ProcfsBasedProcessTree pTree = null;
+  private ResourceCalculatorProcessTree pTree = null;
 
   boolean readMemInfoFile = false;
   boolean readCpuInfoFile = false;

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java?rev=1331645&r1=1331644&r2=1331645&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java
 Sat Apr 28 01:00:04 2012
@@ -39,7 +39,7 @@ import org.apache.hadoop.util.Shell.Shel
 /**
  * A Proc file-system based ProcessTree. Works only on Linux.
  */
-public class ProcfsBasedProcessTree extends ProcessTree {
+public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
 
   static final Log LOG = LogFactory
   .getLog(ProcfsBasedProcessTree.class);
@@ -92,15 +92,12 @@ public class ProcfsBasedProcessTree exte
   private Long cpuTime = 0L;
 
   private Map processTree = new HashMap();
-
-  public ProcfsBasedProcessTree(String pid) {
-this(pid, false);
-  }
   
-  public ProcfsBasedProcessTree(String pid, boolean setsidUsed) {
+  public ProcfsBasedProcessTree(String pid) {
 this(pid,PROCFS);
   }
 
+  // exposing procfs dir for testing overrides
   public ProcfsBasedProcessTree(String pid, String procfsDir) {
 this.pid = getValidPID(pid);
 this.procfsDir = procfsDir;
@@ -132,7 +129,8 @@ public class ProcfsBasedProcessTree exte
* 
* @return the process-tree with latest state.
*/
-  public ProcfsBasedProcessTree getProcessTree() {
+ 

svn commit: r1331621 - in /hadoop/common/branches/branch-1-win: ./ src/core/org/apache/hadoop/util/ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/mapred/

2012-04-27 Thread sradia
Author: sradia
Date: Fri Apr 27 22:54:44 2012
New Revision: 1331621

URL: http://svn.apache.org/viewvc?rev=1331621&view=rev
Log:
MAPREDUCE-4201 - Getting PID not working on Windows. Termination of 
Task/TaskJVM's not working (Bikas Saha via Sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/Child.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/JvmManager.java

hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestKillSubProcesses.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1331621&r1=1331620&r2=1331621&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Fri Apr 27 22:54:44 2012
@@ -11,6 +11,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8234 - Enable user group mappings on Windows (Bikas Saha via Sanjay)
 
+MAPREDUCE-4201 - Getting PID not working on Windows. Termination of 
Task/TaskJVM's not working (Bikas Saha via Sanjay)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java?rev=1331621&r1=1331620&r2=1331621&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/ProcessTree.java
 Fri Apr 27 22:54:44 2012
@@ -107,7 +107,19 @@ public class ProcessTree {
 if(!ProcessTree.isAlive(pid)) {
   return;
 }
-String[] args = { "kill", "-" + signal.getValue(), pid };
+String[] args = null;
+if(Shell.WINDOWS){
+  if (signal == Signal.KILL) {
+String[] wargs = { "taskkill", "/F", "/PID", pid };
+args = wargs;
+  } else {
+String[] wargs = { "taskkill", "/PID", pid };
+args = wargs;
+  }
+} else {
+ String[] uargs = { "kill", "-" + signal.getValue(), pid };
+ args = uargs;
+}
 ShellCommandExecutor shexec = new ShellCommandExecutor(args);
 try {
   shexec.execute();
@@ -157,19 +169,29 @@ public class ProcessTree {
* @return true if process is alive.
*/
   public static boolean isAlive(String pid) {
-ShellCommandExecutor shexec = null;
-try {
-  String[] args = { "kill", "-0", pid };
-  shexec = new ShellCommandExecutor(args);
-  shexec.execute();
-} catch (ExitCodeException ee) {
-  return false;
-} catch (IOException ioe) {
-  LOG.warn("Error executing shell command "
-  + Arrays.toString(shexec.getExecString()) + ioe);
-  return false;
+if (Shell.WINDOWS) {
+  try {
+String result = Shell.execCommand("cmd", "/c", "tasklist /FI \"PID eq 
"+pid+" \" /NH");
+return (result.contains(pid));
+  } catch (IOException ioe) {
+LOG.warn("Error executing shell command", ioe);
+return false;
+  }
+} else {
+  ShellCommandExecutor shexec = null;
+  try {
+String[] args = { "kill", "-0", pid };
+shexec = new ShellCommandExecutor(args);
+shexec.execute();
+  } catch (ExitCodeException ee) {
+return false;
+  } catch (IOException ioe) {
+LOG.warn("Error executing shell command "
++ Arrays.toString(shexec.getExecString()) + ioe);
+return false;
+  }
+  return (shexec.getExitCode() == 0 ? true : false);
 }
-return (shexec.getExitCode() == 0 ? true : false);
   }
   
   /**

Modified: 
hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/Child.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/Child.java?rev=1331621&r1=1331620&r2=1331621&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/Child.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/Child.java
 Fri Apr 27 22:54:44 2012
@@ -47,6 +47,8 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.LogManager;
 
+import java.lang.mana

svn commit: r1329319 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/util/ src/main/proto/ src/test/

2012-04-23 Thread sradia
Author: sradia
Date: Mon Apr 23 16:34:21 2012
New Revision: 1329319

URL: http://svn.apache.org/viewvc?rev=1329319&view=rev
Log:
HADOOP-8285 Use ProtoBuf for RpcPayLoadHeader (sanjay radia)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
Removed:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcPayloadHeader.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1329319&r1=1329318&r2=1329319&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Apr 
23 16:34:21 2012
@@ -63,6 +63,8 @@ Trunk (unreleased changes)
 
 HADOOP-8290. Remove remaining references to hadoop.native.lib (harsh)
 
+HADOOP-8285 Use ProtoBuf for RpcPayLoadHeader (sanjay radia)
+
   BUG FIXES
 
 HADOOP-8177. MBeans shouldn't try to register when it fails to create 
MBeanName.

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1329319&r1=1329318&r2=1329319&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 Mon Apr 23 16:34:21 2012
@@ -282,8 +282,13 @@
   
   
 
+
+  
+  
+
 
   
   
 
+
  

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1329319&r1=1329318&r2=1329319&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Mon Apr 23 16:34:21 2012
@@ -50,8 +50,9 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.ipc.RpcPayloadHeader.*;
 import 
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
+import 
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;
+import 
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
@@ -163,10 +164,10 @@ public class Cli

svn commit: r1310617 - in /hadoop/common/branches/branch-1-win: ./ src/core/org/apache/hadoop/security/ src/core/org/apache/hadoop/util/ src/mapred/org/apache/hadoop/mapred/

2012-04-06 Thread sradia
Author: sradia
Date: Fri Apr  6 22:17:08 2012
New Revision: 1310617

URL: http://svn.apache.org/viewvc?rev=1310617&view=rev
Log:
HADOOP-8234 - Enable user group mappings on Windows (Bikas Saha via Sanjay)

Modified:
hadoop/common/branches/branch-1-win/CHANGES.txt

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Credentials.java

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/UserGroupInformation.java

hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/Shell.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/Child.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/JobInProgress.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/ReduceTask.java

hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1310617&r1=1310616&r2=1310617&view=diff
==
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Fri Apr  6 22:17:08 2012
@@ -9,6 +9,8 @@ branch-hadoop-1-win - unreleased
 
 HADOOP-8223 - Initial patch for branch-1-win (David Lao via Sanjay)
 
+HADOOP-8234 - Enable user group mappings on Windows (Bikas Saha via Sanjay)
+
 Release 1.1.0 - unreleased
 
   NEW FEATURES

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Credentials.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Credentials.java?rev=1310617&r1=1310616&r2=1310617&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Credentials.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Credentials.java
 Fri Apr  6 22:17:08 2012
@@ -126,8 +126,6 @@ public class Credentials implements Writ
  ) throws IOException {
 FSDataInputStream in = null;
 Credentials credentials = new Credentials();
-if (Shell.DISABLEWINDOWS_TEMPORARILY)
-  return credentials;
 try {
   in = filename.getFileSystem(conf).open(filename);
   credentials.readTokenStorageStream(in);

Modified: 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java?rev=1310617&r1=1310616&r2=1310617&view=diff
==
--- 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
 (original)
+++ 
hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
 Fri Apr  6 22:17:08 2012
@@ -20,11 +20,7 @@ package org.apache.hadoop.security;
 import java.io.IOException;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.HashSet;
 import java.util.StringTokenizer;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -33,7 +29,7 @@ import org.apache.hadoop.util.Shell.Exit
 
 /**
  * A simple shell-based implementation of {@link GroupMappingServiceProvider} 
- * that exec's the groups shell command to fetch the group
+ * that exec's a shell command to fetch the group
  * memberships of a given user.
  */
 public class ShellBasedUnixGroupsMapping implements 
GroupMappingServiceProvider {
@@ -42,7 +38,7 @@ public class ShellBasedUnixGroupsMapping
   
   @Override
   public List getGroups(String user) throws IOException {
-return getUnixGroups(user);
+return getUserGroups(user);
   }
 
   @Override
@@ -62,24 +58,53 @@ public class ShellBasedUnixGroupsMapping
* @return the groups list that the user belongs to
* @throws IOException if encounter any error when running the command
*/
-  private static List getUnixGroups(final String user) throws 
IOException {
-String result = "";
-if (Shell.DISABLEWINDOWS_TEMPORARILY)
-  result = "hadoopusers";
+  private static List getUserGroups(final String user) throws 
IOException {
+List groups = new LinkedList();
+if (Shell.WINDOWS) {
+  String result = Shell.execCommand(Shell.getGroupsForUserCommand(user));
+  String[] lines = result.split(&qu

svn commit: r1295261 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/util/ src/main/proto/

2012-02-29 Thread sradia
Author: sradia
Date: Wed Feb 29 20:43:21 2012
New Revision: 1295261

URL: http://svn.apache.org/viewvc?rev=1295261&view=rev
Log:
HADOOP-7557 Make IPC header be extensible (sanjay radia)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/IpcException.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
Removed:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ConnectionHeader.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1295261&r1=1295260&r2=1295261&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Feb 
29 20:43:21 2012
@@ -55,6 +55,8 @@ Trunk (unreleased changes)
 HADOOP-7994. Remove getProtocolVersion and getProtocolSignature from the 
 client side translator and server side implementation. (jitendra)
 
+HADOOP-7557 Make IPC header be extensible (sanjay radia)
+
   BUG FIXES
 
 HADOOP-8018.  Hudson auto test for HDFS has started throwing javadoc

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1295261&r1=1295260&r2=1295261&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
 Wed Feb 29 20:43:21 2012
@@ -278,4 +278,8 @@
   
   
 
+   
+  
+  
+
  

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1295261&r1=1295260&r2=1295261&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Wed Feb 29 20:43:21 2012
@@ -51,6 +51,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.RpcPayloadHeader.*;
+import 
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -66,6 +67,7 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
@@ -211,7 +213,7 @@ public class Client {
   private class Connection extends Thread {
 private InetSocketAddress server; // server ip:port
 private String serverPrincipal;  // server's krb5 principal name
-private ConnectionHeader header;  // connection header
+private IpcConnectionContextProto connectionContext;   // connection 
context
 private final ConnectionId remoteId;// connection id
 private AuthMethod authMethod; // authentication method
 private boolean useSasl;
@@ -292,8 +294,8 @@ public class Client {
 authMethod = AuthMethod.KERBEROS;
   }
   
-  header = 
-new ConnectionHeader(RPC.getProtocolName(protocol), ticket, 
authMethod);
+  connectionContext = ProtoUtil.makeIpcConnectionContext(
+  RPC.getProtocolName(protocol), ticket, authMethod);
   
   if (LOG.isDebugEnabled())
 LOG.deb

svn commit: r1213619 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java src/main/java/org/apache/hadoop/ipc/Server.ja

2011-12-13 Thread sradia
Author: sradia
Date: Tue Dec 13 09:18:04 2011
New Revision: 1213619

URL: http://svn.apache.org/viewvc?rev=1213619&view=rev
Log:
HADOOP-7913 Fix bug in ProtoBufRpcEngine  (sanjay)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1213619&r1=1213618&r2=1213619&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Dec 
13 09:18:04 2011
@@ -134,6 +134,8 @@ Trunk (unreleased changes)
 HADOOP-7902. skipping name rules setting (if already set) should be done 
 on UGI initialization only. (tucu)
 
+HADOOP-7913 Fix bug in ProtoBufRpcEngine  (sanjay)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1213619&r1=1213618&r2=1213619&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
 Tue Dec 13 09:18:04 2011
@@ -325,7 +325,7 @@ public class ProtobufRpcEngine implement
 int numReaders, int queueSizePerHandler, boolean verbose,
 SecretManager secretManager)
 throws IOException {
-  super(bindAddress, port, RpcRequestWritable.class, numHandlers,
+  super(bindAddress, port, null, numHandlers,
   numReaders, queueSizePerHandler, conf, classNameBase(protocolImpl
   .getClass().getName()), secretManager);
   this.verbose = verbose;  

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1213619&r1=1213618&r2=1213619&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 Tue Dec 13 09:18:04 2011
@@ -62,13 +62,13 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.ipc.RpcPayloadHeader.*;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
 import org.apache.hadoop.ipc.RPC.VersionMismatch;
+import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.RpcPayloadHeader.RpcPayloadOperation;
 import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;
@@ -76,18 +76,18 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
-import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.SaslRpcServer.SaslDigestCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
+import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-import org.apache.hadoop

svn commit: r1210208 [2/2] - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/ipc/protobuf/ src/proto/ src/test/java

2011-12-04 Thread sradia
Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java?rev=1210208&r1=1210207&r2=1210208&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
 Sun Dec  4 20:44:36 2011
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.net.NetUtils;
 
 /**
@@ -72,8 +73,8 @@ public class TestIPCServerResponder exte
 }
 
 @Override
-public Writable call(String protocol, Writable param, long receiveTime)
-throws IOException {
+public Writable call(RpcKind rpcKind, String protocol, Writable param,
+long receiveTime) throws IOException {
   if (sleep) {
 try {
   Thread.sleep(RANDOM.nextInt(20)); // sleep a bit

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java?rev=1210208&r1=1210207&r2=1210208&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
 Sun Dec  4 20:44:36 2011
@@ -23,10 +23,15 @@ import java.net.InetSocketAddress;
 import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
+import org.apache.hadoop.ipc.TestProtoBufRpc.PBServerImpl;
+import org.apache.hadoop.ipc.TestProtoBufRpc.TestRpcService;
+import 
org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto;
 import org.apache.hadoop.net.NetUtils;
 import org.junit.Before;
 import org.junit.After;
 import org.junit.Test;
+import com.google.protobuf.BlockingService;
 
 public class TestMultipleProtocolServer {
   private static final String ADDRESS = "0.0.0.0";
@@ -173,9 +178,19 @@ public class TestMultipleProtocolServer 
 // create a server with two handlers
 server = RPC.getServer(Foo0.class,
   new Foo0Impl(), ADDRESS, 0, 2, false, conf, 
null);
-server.addProtocol(Foo1.class, new Foo1Impl());
-server.addProtocol(Bar.class, new BarImpl());
-server.addProtocol(Mixin.class, new BarImpl());
+server.addProtocol(RpcKind.RPC_WRITABLE, Foo1.class, new Foo1Impl());
+server.addProtocol(RpcKind.RPC_WRITABLE, Bar.class, new BarImpl());
+server.addProtocol(RpcKind.RPC_WRITABLE, Mixin.class, new BarImpl());
+
+
+// Add Protobuf server
+// Create server side implementation
+PBServerImpl pbServerImpl = 
+new PBServerImpl();
+BlockingService service = TestProtobufRpcProto
+.newReflectiveBlockingService(pbServerImpl);
+server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService.class,
+service);
 server.start();
 addr = NetUtils.getConnectAddress(server);
   }
@@ -251,5 +266,16 @@ public class TestMultipleProtocolServer 
   public void testIncorrectServerCreation() throws IOException {
 RPC.getServer(Foo1.class,
 new Foo0Impl(), ADDRESS, 0, 2, false, conf, null);
+  } 
+  
+  // Now test a PB service - a server  hosts both PB and Writable Rpcs.
+  @Test
+  public void testPBService() throws Exception {
+// Set RPC engine to protobuf RPC engine
+Configuration conf2 = new Configuration();
+RPC.setProtocolEngine(conf2, TestRpcService.class,
+ProtobufRpcEngine.class);
+TestRpcService client = RPC.getProxy(TestRpcService.class, 0, addr, conf2);
+TestProtoBufRpc.testProtoBufRpc(client);
   }
 }

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java?rev=1210208&r1=1210207&r2=1210208&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
 (original)

svn commit: r1197885 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/

2011-11-04 Thread sradia
Author: sradia
Date: Sat Nov  5 05:06:44 2011
New Revision: 1197885

URL: http://svn.apache.org/viewvc?rev=1197885&view=rev
Log:
HADOOP-7776 Make the Ipc-Header in a RPC-Payload an explicit 
header (sanjay)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcPayloadHeader.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1197885&r1=1197884&r2=1197885&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Sat Nov 
 5 05:06:44 2011
@@ -55,6 +55,8 @@ Trunk (unreleased changes)
 HADOOP-7792. Add verifyToken method to 
AbstractDelegationTokenSecretManager.
 (jitendra)
 
+   HADOOP-7776 Make the Ipc-Header in a RPC-Payload an explicit 
header (sanjay)
+
   BUGS
 
 HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1197885&r1=1197884&r2=1197885&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Sat Nov  5 05:06:44 2011
@@ -48,6 +48,7 @@ import org.apache.commons.logging.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RpcPayloadHeader.*;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -152,16 +153,20 @@ public class Client {
 return refCount==0;
   }
 
-  /** A call waiting for a value. */
+  /** 
+   * Class that represents an RPC call
+   */
   private class Call {
-int id;   // call id
-Writable param;   // parameter
-Writable value;   // value, null if error
-IOException error;// exception, null if value
-boolean done; // true when call is done
-
-protected Call(Writable param) {
-  this.param = param;
+final int id;   // call id
+final Writable rpcRequest;  // the serialized rpc request - RpcPayload
+Writable rpcResponse;   // null if rpc has error
+IOException error;  // exception, null if success
+final RpcKind rpcKind;  // Rpc EngineKind
+boolean done;   // true when call is done
+
+protected Call(RpcKind rpcKind, Writable param) {
+  this.rpcKind = rpcKind;
+  this.rpcRequest = param;
   synchronized (Client.this) {
 this.id = counter++;
   }
@@ -187,15 +192,15 @@ public class Client {
 /** Set the return value when there is no error. 
  * Notify the caller the call is done.
  * 
- * @param value return value of the call.
+ * @param rpcResponse return value of the rpc call.
  */
-public synchronized void setValue(Writable value) {
-  this.value = value;
+public synchronized void setRpcResponse(Writable rpcResponse) {
+  this.rpcResponse = rpcResponse;
   callComplete();
 }
 
-public synchronized Writable getValue() {
-  return value;
+public synchronized Writable getRpcResult() {
+  return rpcResponse;
 }
   }
 
@@ -727,6 +732,7 @@ public class Client {
   }
 }
 
+@SuppressWarnings("unused")
 public InetSocketAddress getRemoteAddress() {
   return server;
 }
@@ -787,8 +793,10 @@ public class Client {
   //for serializing the
   //data to be written
   d = new DataOutputBuffer();
-  d.writeInt(call.id);
-  call.param.write(d);
+  RpcPaylo

svn commit: r1178639 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

2011-10-03 Thread sradia
Author: sradia
Date: Mon Oct  3 23:39:25 2011
New Revision: 1178639

URL: http://svn.apache.org/viewvc?rev=1178639&view=rev
Log:

HADOOP-7716 RPC protocol registration on SS does not log the protocol 
name

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1178639&r1=1178638&r2=1178639&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Mon Oct 
 3 23:39:25 2011
@@ -24,6 +24,9 @@ Trunk (unreleased changes)
 HADOOP-7693. Enhance AvroRpcEngine to support the new #addProtocol
 interface introduced in HADOOP-7524.  (cutting)
 
+   HADOOP-7716 RPC protocol registration on SS does not log the protocol 
name
+   (only the class which may be different) (sanjay)
+
   BUGS
 
 HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1178639&r1=1178638&r2=1178639&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
 Mon Oct  3 23:39:25 2011
@@ -388,8 +388,9 @@ public class WritableRpcEngine implement
   }
   protocolImplMap.put(new ProtoNameVer(protocolName, version),
   new ProtoClassProtoImpl(protocolClass, protocolImpl)); 
-  LOG.info("ProtocolImpl=" + protocolImpl.getClass().getName() + 
-  " protocolClass=" + protocolClass.getName() + " version=" + version);
+  LOG.info("Protocol Name = " + protocolName +  " version=" + version +
+  " ProtocolImpl=" + protocolImpl.getClass().getName() + 
+  " protocolClass=" + protocolClass.getName());
 }
 
 private static class VerProtocolImpl {




svn commit: r1177002 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java

2011-09-28 Thread sradia
Author: sradia
Date: Wed Sep 28 18:33:00 2011
New Revision: 1177002

URL: http://svn.apache.org/viewvc?rev=1177002&view=rev
Log:
HADOOP-7687 Make getProtocolSignature public  (sanjay)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1177002&r1=1177001&r2=1177002&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Sep 
28 18:33:00 2011
@@ -23,6 +23,8 @@ Trunk (unreleased changes)
 HADOOP-7668. Add a NetUtils method that can tell if an InetAddress 
 belongs to local host. (suresh)
 
+HADOOP-7687 Make getProtocolSignature public  (sanjay)
+
   BUGS
 
 HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java?rev=1177002&r1=1177001&r2=1177002&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
 Wed Sep 28 18:33:00 2011
@@ -199,7 +199,7 @@ public class ProtocolSignature implement
* @param protocol protocol
* @return the server's protocol signature
*/
-  static ProtocolSignature getProtocolSignature(
+  public static ProtocolSignature getProtocolSignature(
   int clientMethodsHashCode,
   long serverVersion,
   Class protocol) {




svn commit: r1167444 [3/3] - in /hadoop/common/branches/branch-0.20-security: ./ src/core/org/apache/hadoop/security/ src/core/org/apache/hadoop/security/authentication/ src/core/org/apache/hadoop/sec

2011-09-09 Thread sradia
Added: 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java?rev=1167444&view=auto
==
--- 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
 (added)
+++ 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
 Sat Sep 10 02:57:10 2011
@@ -0,0 +1,113 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.security.authentication.server;
+
+import 
org.apache.hadoop.security.authentication.client.AuthenticationException;
+import junit.framework.TestCase;
+import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
+import org.mockito.Mockito;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.util.Properties;
+
+public class TestPseudoAuthenticationHandler extends TestCase {
+
+  public void testInit() throws Exception {
+PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
+try {
+  Properties props = new Properties();
+  props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, 
"false");
+  handler.init(props);
+  assertEquals(false, handler.getAcceptAnonymous());
+} finally {
+  handler.destroy();
+}
+  }
+
+  public void testType() throws Exception {
+PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
+assertEquals(PseudoAuthenticationHandler.TYPE, handler.getType());
+  }
+
+  public void testAnonymousOn() throws Exception {
+PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
+try {
+  Properties props = new Properties();
+  props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
+  handler.init(props);
+
+  HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+  HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+  AuthenticationToken token = handler.authenticate(request, response);
+
+  assertEquals(AuthenticationToken.ANONYMOUS, token);
+} finally {
+  handler.destroy();
+}
+  }
+
+  public void testAnonymousOff() throws Exception {
+PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
+try {
+  Properties props = new Properties();
+  props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, 
"false");
+  handler.init(props);
+
+  HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+  HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+  handler.authenticate(request, response);
+  fail();
+} catch (AuthenticationException ex) {
+  // Expected
+} catch (Exception ex) {
+  fail();
+} finally {
+  handler.destroy();
+}
+  }
+
+  private void _testUserName(boolean anonymous) throws Exception {
+PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
+try {
+  Properties props = new Properties();
+  props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, 
Boolean.toString(anonymous));
+  handler.init(props);
+
+  HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+  HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+  
Mockito.when(request.getParameter(PseudoAuthenticator.USER_NAME)).thenReturn("user");
+
+  AuthenticationToken token = handler.authenticate(request, response);
+
+  assertNotNull(token);
+  assertEquals("user", token.getUserName());
+  assertEquals("user", token.getName());
+  assertEquals(PseudoAuthenticationHandler.TYPE, token.getType());
+} finally {
+  handler.destroy();
+}
+  }
+
+  public void testUserNameAnonymousOff() throws Exception {
+_testUserName(false);
+  }
+
+  public void testUserNameAnonymousOn() throws Exception {
+_testUserName(true);
+  }
+
+}

Added: 
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/security/authentication/u

svn commit: r1164771 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/ipc/ src/test/java/org/apache/hadoop/ipc/

2011-09-02 Thread sradia
Author: sradia
Date: Sat Sep  3 00:31:05 2011
New Revision: 1164771

URL: http://svn.apache.org/viewvc?rev=1164771&view=rev
Log:
  HADOOP-7524 and MapReduce-2887 Change RPC to allow multiple protocols 
including multiple versions of the same protocol (sanjay Radia)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolInfo.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1164771&r1=1164770&r2=1164771&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Sat Sep 
 3 00:31:05 2011
@@ -5,6 +5,7 @@ Trunk (unreleased changes)
   IMPROVEMENTS
 
   HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
+  HADOOP-7524 Change RPC to allow multiple protocols including multiple 
versions of the same protocol (sanjay Radia)
 
 Release 0.23.0 - Unreleased
 

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1164771&r1=1164770&r2=1164771&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
 Sat Sep  3 00:31:05 2011
@@ -285,8 +285,8 @@ public class Client {
 authMethod = AuthMethod.KERBEROS;
   }
   
-  header = new ConnectionHeader(protocol == null ? null : protocol
-  .getName(), ticket, authMethod);
+  header = 
+new ConnectionHeader(RPC.getProtocolName(protocol), ticket, 
authMethod);
   
   if (LOG.isDebugEnabled())
 LOG.debug("Use " + authMethod + " authentication for protocol "

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolInfo.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolInfo.java?rev=1164771&view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolInfo.java
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolInfo.java
 Sat Sep  3 00:31:05 2011
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ipc;
+
+import java.lang.annotation.Retention;
+import java

svn commit: r1129989 - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/shell/ src/java/org/apache/hadoop/fs/viewfs/ src/test/core/org/apache/hadoop/fs/ src/tes

2011-05-31 Thread sradia
Author: sradia
Date: Wed Jun  1 03:27:23 2011
New Revision: 1129989

URL: http://svn.apache.org/viewvc?rev=1129989&view=rev
Log:
HADOOP-7284 Trash and shell's rm does not work for viewfs

Added:

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java

hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/Constants.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/InodeTree.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/ViewFs.java
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestTrash.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestChRootedFs.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1129989&r1=1129988&r2=1129989&view=diff
==
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Wed Jun  1 03:27:23 2011
@@ -264,6 +264,8 @@ Trunk (unreleased changes)
 HADOOP-7336. TestFileContextResolveAfs will fail with default 
 test.build.data property. (jitendra)
 
+HADOOP-7284 Trash and shell's rm does not work for viewfs (Sanjay Radia)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java?rev=1129989&r1=1129988&r2=1129989&view=diff
==
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java Wed Jun  1 
03:27:23 2011
@@ -17,6 +17,11 @@
  */
 package org.apache.hadoop.fs;
 
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT;
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_KEY;
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
+
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.text.DateFormat;
@@ -30,7 +35,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.StringUtils;
@@ -86,7 +90,31 @@ public class Trash extends Configured {
  FS_TRASH_INTERVAL_DEFAULT) *
 MSECS_PER_MINUTE);
   }
-
+  
+  /**
+   * In case of symlinks or mount points, one has to move to the appropriate
+   * trashbin in the actual volume of the path p being deleted.
+   * 
+   * Hence we get the file system of the fully-qualified resolved path and
+   * then move the path p to the trashbin in that volume.
+   * @param fs - the filesystem of path p
+   * @param p - the path being deleted - to be moved to trash
+   * @param conf - configuration
+   * @return false if the item is already in the trash or trash is disabled
+   * @throws IOException on error
+   */
+  public static boolean moveToAppropriateTrash(FileSystem fs, Path p,
+  Configuration conf) throws IOException {
+Path fullyResolvedPath = fs.resolvePath(p);
+Trash trash = new Trash(FileSystem.get(fullyResolvedPath.toUri(), conf), 
conf);
+boolean success =  trash.moveToTrash(fullyResolvedPath);
+if (success) {
+  System.out.println("Moved: '" + p + "' to trash at: " +
+  trash.getCurrentTrashDir() );
+}
+return success;
+  }
+  
   private Trash(Path home, Configuration conf) throws IOException {
 super(conf);
 this.fs = home.getFileSystem(conf);
@@ -122,7 +150,7 @@ public class Trash extends Configured {
 if (!fs.exists(path)) 

svn commit: r1127642 - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/shell/ src/java/org/apache/hadoop/fs/viewfs/ src/test/core/org/apache/hadoop/fs/ src/tes

2011-05-25 Thread sradia
Author: sradia
Date: Wed May 25 19:23:18 2011
New Revision: 1127642

URL: http://svn.apache.org/viewvc?rev=1127642&view=rev
Log:
HADOOP-7284 Trash and shell's rm does not work for viewfs (Sanjay Radia)

Added:

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java

hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestTrash.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestChRootedFs.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1127642&r1=1127641&r2=1127642&view=diff
==
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Wed May 25 19:23:18 2011
@@ -247,6 +247,8 @@ Trunk (unreleased changes)
 HADOOP-7282. ipc.Server.getRemoteIp() may return null.  (John George
 via szetszwo)
 
+HADOOP-7284 Trash and shell's rm does not work for viewfs (Sanjay Radia)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java?rev=1127642&r1=1127641&r2=1127642&view=diff
==
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/Trash.java Wed May 25 
19:23:18 2011
@@ -17,6 +17,11 @@
  */
 package org.apache.hadoop.fs;
 
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT;
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_KEY;
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
+
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.text.DateFormat;
@@ -30,7 +35,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.StringUtils;
@@ -86,7 +90,26 @@ public class Trash extends Configured {
  FS_TRASH_INTERVAL_DEFAULT) *
 MSECS_PER_MINUTE);
   }
-
+  
+  /**
+   * In case of symlinks or mount points, one has to move to the appropriate
+   * trashbin in the actual volume of the path p being deleted.
+   * 
+   * Hence we get the file system of the fully-qualified resolved path and
+   * then move the path p to the trashbin in that volume.
+   * @param fs - the filesystem of path p
+   * @param p - the path being deleted - to be moved to trash
+   * @param conf - configuration
+   * @return false if the item is already in the trash or trash is disabled
+   * @throws IOException on error
+   */
+  public static boolean moveToAppropriateTrash(FileSystem fs, Path p,
+  Configuration conf) throws IOException {
+Path fullyResolvedPath = fs.resolvePath(p);
+Trash trash = new Trash(FileSystem.get(fullyResolvedPath.toUri(), conf), 
conf);
+return trash.moveToTrash(fullyResolvedPath);
+  }
+  
   private Trash(Path home, Configuration conf) throws IOException {
 super(conf);
 this.fs = home.getFileSystem(conf);
@@ -122,7 +145,7 @@ public class Trash extends Configured {
 if (!fs.exists(path)) // check that path exists
   throw new FileNotFoundException(path.toString());
 
-String qpath = path.makeQualified(fs).toString();
+String qpath = fs.makeQualified(path).toString();
 
 if (qpath.startsWith(trash.toString())) {
   return false;   // already in trash

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java?rev=1

svn commit: r1090068 - in /hadoop/site: author/src/documentation/content/xdocs/ publish/

2011-04-07 Thread sradia
Author: sradia
Date: Fri Apr  8 00:12:02 2011
New Revision: 1090068

URL: http://svn.apache.org/viewvc?rev=1090068&view=rev
Log:
Updated PMC list to include suresh and sanjay

Modified:
hadoop/site/author/src/documentation/content/xdocs/who.xml
hadoop/site/publish/general_lists.html
hadoop/site/publish/index.html
hadoop/site/publish/index.pdf
hadoop/site/publish/linkmap.html
hadoop/site/publish/linkmap.pdf
hadoop/site/publish/mailing_lists.html
hadoop/site/publish/privacy_policy.html
hadoop/site/publish/who.html
hadoop/site/publish/who.pdf

Modified: hadoop/site/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/site/author/src/documentation/content/xdocs/who.xml?rev=1090068&r1=1090067&r2=1090068&view=diff
==
--- hadoop/site/author/src/documentation/content/xdocs/who.xml (original)
+++ hadoop/site/author/src/documentation/content/xdocs/who.xml Fri Apr  8 
00:12:02 2011
@@ -171,6 +171,15 @@
  -8

 
+
+   
+ sradia
+ http://people.apache.org/~sradia";>Sanjay Radia
+ Yahoo!
+ 
+ -8
+
+

  stack
  Michael Stack
@@ -178,6 +187,14 @@
  HBase
  -8

+   
+   
+ suresh
+ http://people.apache.org/~suresh";>Suresh 
Srinivas
+ Yahoo!
+ 
+ -8
+
 
 
   szetszwo

Modified: hadoop/site/publish/general_lists.html
URL: 
http://svn.apache.org/viewvc/hadoop/site/publish/general_lists.html?rev=1090068&r1=1090067&r2=1090068&view=diff
==
--- hadoop/site/publish/general_lists.html (original)
+++ hadoop/site/publish/general_lists.html Fri Apr  8 00:12:02 2011
@@ -150,6 +150,9 @@ document.write("Last Published: " + docu
 http://avro.apache.org/";>Avro
 
 
+http://cassandra.apache.org/";>Cassandra
+
+
 http://incubator.apache.org/chukwa/";>Chukwa
 
 

Modified: hadoop/site/publish/index.html
URL: 
http://svn.apache.org/viewvc/hadoop/site/publish/index.html?rev=1090068&r1=1090067&r2=1090068&view=diff
==
--- hadoop/site/publish/index.html (original)
+++ hadoop/site/publish/index.html Fri Apr  8 00:12:02 2011
@@ -150,6 +150,9 @@ document.write("Last Published: " + docu
 http://avro.apache.org/";>Avro
 
 
+http://cassandra.apache.org/";>Cassandra
+
+
 http://incubator.apache.org/chukwa/";>Chukwa
 
 
@@ -266,6 +269,11 @@ document.write("Last Published: " + docu
 
   
 
+http://cassandra.apache.org/";>Cassandra: 
+  A scalable multi-master database with no single points of failure.
+
+  
+
 http://incubator.apache.org/chukwa/";>Chukwa: 
   A data collection system for managing large distributed systems.  
 
@@ -299,7 +307,7 @@ document.write("Last Published: " + docu
 
 
 
-
+
 Who Uses Hadoop?
 
 
@@ -310,17 +318,17 @@ document.write("Last Published: " + docu
 
 
 
-
+
 News
 
-
+
 January 2011 - ZooKeeper Graduates
 Hadoop's ZooKeeper subproject has graduated to become a
  top-level Apache project.
 ZooKeeper can now be found
  at http://zookeeper.apache.org/";>http://zookeeper.apache.org/
 
-
+
 September 2010 - Hive and Pig Graduate
 Hadoop's Hive and Pig subprojects have graduated to become
  top-level Apache projects.
@@ -330,7 +338,7 @@ document.write("Last Published: " + docu
 Pig can now be found
  at http://pig.apache.org/";>http://pig.apache.org/
 
-
+
 May 2010 - Avro and HBase Graduate
 Hadoop's Avro and HBase subprojects have graduated to become
  top-level Apache projects.
@@ -340,7 +348,7 @@ document.write("Last Published: " + docu
 HBase can now be found
  at http://hbase.apache.org/";>http://hbase.apache.org/
 
-
+
 July 2009 - New Hadoop Subprojects
 Hadoop is getting bigger! 
 
@@ -354,15 +362,15 @@ document.write("Last Published: " + docu
 
 
 See the summary descriptions for all subprojects above. Visit the 
individual sites for more detailed information.
-
+
 March 2009 - ApacheCon EU
 In case you missed it http://www.eu.apachecon.com/c/aceu2009/";>ApacheCon Europe 2009 
 
-
+
 November 2008 - ApacheCon US
 In case you missed it http://us.apachecon.com/c/acus2008/";>ApacheCon US 2008 
 
-
+
 July 2008 - Hadoop Wins Terabyte Sort Benchmark
 
  

Modified: hadoop/site/publish/index.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/site/publish/index.pdf?rev=1090068&r1=1090067&r2=1090068&view=diff
==
--- hadoop/site/publish/index.

svn commit: r1043117 - in /hadoop/common/trunk: CHANGES.txt src/test/core/org/apache/hadoop/fs/loadGenerator/DataGenerator.java src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java

2010-12-07 Thread sradia
Author: sradia
Date: Tue Dec  7 16:32:17 2010
New Revision: 1043117

URL: http://svn.apache.org/viewvc?rev=1043117&view=rev
Log:
HADOOP-7054 Change NN LoadGenerator to use FileContext APIs (Sanjay Radia)

Modified:
hadoop/common/trunk/CHANGES.txt

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/DataGenerator.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1043117&r1=1043116&r2=1043117&view=diff
==
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Tue Dec  7 16:32:17 2010
@@ -23,6 +23,9 @@ Trunk (unreleased changes)
 HADOOP-7049. TestReconfiguration should be junit v4.
 (Patrick Kling via eli)
 
+HADOOP-7054 Change NN LoadGenerator to use FileContext APIs
+   (Sanjay Radia)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/DataGenerator.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/DataGenerator.java?rev=1043117&r1=1043116&r2=1043117&view=diff
==
--- 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/DataGenerator.java
 (original)
+++ 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/DataGenerator.java
 Tue Dec  7 16:32:17 2010
@@ -22,12 +22,15 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
+import java.util.EnumSet;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -48,7 +51,7 @@ import org.apache.hadoop.util.ToolRunner
 public class DataGenerator extends Configured implements Tool {
   private File inDir = StructureGenerator.DEFAULT_STRUCTURE_DIRECTORY;
   private Path root = DEFAULT_ROOT;
-  private FileSystem fs;
+  private FileContext fc;
   final static private long BLOCK_SIZE = 10;
   final static private String USAGE = "java DataGenerator " +
"-inDir  " +
@@ -78,7 +81,7 @@ public class DataGenerator extends Confi
   /** Parse the command line arguments and initialize the data */
   private int init(String[] args) {
 try { // initialize file system handle
-  fs = FileSystem.get(getConf());
+  fc = FileContext.getFileContext(getConf());
 } catch (IOException ioe) {
   System.err.println("Can not initialize the file system: " + 
   ioe.getLocalizedMessage());
@@ -109,7 +112,7 @@ public class DataGenerator extends Confi
 StructureGenerator.DIR_STRUCTURE_FILE_NAME)));
 String line;
 while ((line=in.readLine()) != null) {
-  fs.mkdirs(new Path(root+line));
+  fc.mkdir(new Path(root+line), FileContext.DEFAULT_PERM, true);
 }
   }
 
@@ -137,10 +140,9 @@ public class DataGenerator extends Confi
* a length of fileSize. The file is filled with character 'a'.
*/
   private void genFile(Path file, long fileSize) throws IOException {
-FSDataOutputStream out = fs.create(file, true, 
-getConf().getInt("io.file.buffer.size", 4096),
-(short)getConf().getInt("dfs.replication", 3),
-fs.getDefaultBlockSize());
+FSDataOutputStream out = fc.create(file, EnumSet.of(CreateFlag.OVERWRITE),
+CreateOpts.createParent(), CreateOpts.bufferSize(4096),
+CreateOpts.repFac((short) 3));
 for(long i=0; ihttp://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java?rev=1043117&r1=1043116&r2=1043117&view=diff
==
--- 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
 (original)
+++ 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
 Tue Dec  7 16:32:17 2010
@@ -26,16 +26,19 @@ import java.io.InputStream;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
+import java.util.EnumSet;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataOut

svn commit: r1034480 - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/ftp/ src/java/org/apache/hadoop/fs/local/ src/test/core/org/apache/hadoop/fs/

2010-11-12 Thread sradia
Author: sradia
Date: Fri Nov 12 17:23:53 2010
New Revision: 1034480

URL: http://svn.apache.org/viewvc?rev=1034480&view=rev
Log:
HADOOP-6903 Make AbstractFSileSystem methods and some FileContext methods to be 
public

Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/ChecksumFs.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/DelegateToFileSystem.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileContext.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/FilterFs.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsConstants.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/ftp/FtpFs.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/local/LocalConfigKeys.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/local/RawLocalFs.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextPermissionBase.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextTestHelper.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1034480&r1=1034479&r2=1034480&view=diff
==
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Fri Nov 12 17:23:53 2010
@@ -171,6 +171,10 @@ Trunk (unreleased changes)
 HADOOP-7024. Create a test method for adding file systems during tests.
 (Kan Zhang via jghoman)
 
+HADOOP-6903 Make AbstractFSileSystem methods and some FileContext methods 
to be public
+(Sanjay Radia via Sanjay Radia)
+
+
   OPTIMIZATIONS
 
 HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).

Modified: 
hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=1034480&r1=1034479&r2=1034480&view=diff
==
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java 
(original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java 
Fri Nov 12 17:23:53 2010
@@ -25,7 +25,6 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.EnumSet;
 import java.util.IdentityHashMap;
-import java.util.Iterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.StringTokenizer;
@@ -77,7 +76,7 @@ public abstract class AbstractFileSystem
   
   private final URI myUri;
   
-  protected Statistics getStatistics() {
+  public Statistics getStatistics() {
 return statistics;
   }
   
@@ -135,7 +134,7 @@ public abstract class AbstractFileSystem
* @throws UnsupportedFileSystemException file system for uri is
*   not found
*/
-  private static AbstractFileSystem createFileSystem(URI uri, Configuration 
conf)
+  public static AbstractFileSystem createFileSystem(URI uri, Configuration 
conf)
   throws UnsupportedFileSystemException {
 Class clazz = conf.getClass("fs.AbstractFileSystem." + 
 uri.getScheme() + ".impl", null);
@@ -152,7 +151,7 @@ public abstract class AbstractFileSystem
* @param cls the class to lookup
* @return a statistics object
*/
-  protected static synchronized Statistics getStatistics(String scheme,
+  public static synchronized Statistics getStatistics(String scheme,
   Class cls) {
 Statistics result = STATISTICS_TABLE.get(cls);
 if (result == null) {
@@ -162,13 +161,13 @@ public abstract class AbstractFileSystem
 return result;
   }
   
-  protected static synchronized void clearStatistics() {
+  public static synchronized void clearStatistics() {
 for(Statistics stat: STATISTICS_TABLE.values()) {
   stat.reset();
 }
   }
 
-  protected static synchronized void printStatistics() {
+  public static synchronized void printStatistics() {
 for (Map.Entry, Statistics> pair: 
 STATISTICS_TABLE.entrySet()) {
   System.out.println("  FileSystem " + pair.getKey().getName() + 
@@ -193,7 +192,7 @@ public abstract class AbstractFileSystem
* @throws UnsupportedFileSystemException if the file system for
*   uri is not supported.
*/
-  static AbstractFileSystem get(final URI uri, final Configuration conf)
+  public static AbstractFileSystem get(final URI uri, final Configuration conf)
   throws UnsupportedFileSystemException {
 return createFileSystem(uri, conf);
   }
@@ -208,14 +207,19 @@ public abstract class AbstractFil

svn commit: r1032730 - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/fs/ src/test/core/org/apache/hadoop/fs/

2010-11-08 Thread sradia
Author: sradia
Date: Mon Nov  8 21:29:05 2010
New Revision: 1032730

URL: http://svn.apache.org/viewvc?rev=1032730&view=rev
Log:
HADOOP-6899 RawLocalFileSystem#setWorkingDir() does not work for relative names

Added:

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileSystemTestHelper.java

hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/fs/Path.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java

hadoop/common/trunk/src/java/org/apache/hadoop/fs/UnsupportedFileSystemException.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1032730&r1=1032729&r2=1032730&view=diff
==
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Mon Nov  8 21:29:05 2010
@@ -309,6 +309,9 @@ Trunk (unreleased changes)
 HADOOP-6926. SocketInputStream incorrectly implements read().
 (Todd Lipcon via tomwhite)
 
+HADOOP-6899 RawLocalFileSystem#setWorkingDir() does not work for relative 
names
+ (Sanjay Radia)
+
 Release 0.21.1 - Unreleased
 
   IMPROVEMENTS

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/Path.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/Path.java?rev=1032730&r1=1032729&r2=1032730&view=diff
==
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/Path.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/Path.java Mon Nov  8 
21:29:05 2010
@@ -190,6 +190,15 @@ public class Path implements Comparable 
   }
 
   /**
+   * Is an absolute path (i.e. a slash-relative path part)
+   *  AND  a scheme is null AND  authority is null.
+   */
+  public boolean isAbsoluteAndSchemeAuthorityNull() {
+return  (isUriPathAbsolute() && 
+uri.getScheme() == null && uri.getAuthority() == null);
+  }
+  
+  /**
*  True if the path component (i.e. directory) of this URI is absolute.
*/
   public boolean isUriPathAbsolute() {

Modified: 
hadoop/common/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1032730&r1=1032729&r2=1032730&view=diff
==
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java 
(original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java 
Mon Nov  8 21:29:05 2010
@@ -53,6 +53,14 @@ public class RawLocalFileSystem extends 
 workingDir = getInitialWorkingDirectory();
   }
   
+  private Path makeAbsolute(Path f) {
+if (f.isAbsolute()) {
+  return f;
+} else {
+  return new Path(workingDir, f);
+}
+  }
+  
   /** Convert a path to a File. */
   public File pathToFile(Path path) {
 checkPath(path);
@@ -368,7 +376,9 @@ public class RawLocalFileSystem extends 
*/
   @Override
   public void setWorkingDirectory(Path newDir) {
-workingDir = newDir;
+workingDir = makeAbsolute(newDir);
+checkPath(workingDir);
+
   }
   
   @Override
@@ -545,4 +555,4 @@ public class RawLocalFileSystem extends 
 return output;
   }
 
-}
+}
\ No newline at end of file

Modified: 
hadoop/common/trunk/src/java/org/apache/hadoop/fs/UnsupportedFileSystemException.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/UnsupportedFileSystemException.java?rev=1032730&r1=1032729&r2=1032730&view=diff
==
--- 
hadoop/common/trunk/src/java/org/apache/hadoop/fs/UnsupportedFileSystemException.java
 (original)
+++ 
hadoop/common/trunk/src/java/org/apache/hadoop/fs/UnsupportedFileSystemException.java
 Mon Nov  8 21:29:05 2010
@@ -34,7 +34,7 @@ public class UnsupportedFileSystemExcept
* Constructs exception with the specified detail message. 
* @param message exception message.
*/
-  UnsupportedFileSystemException(final String message) {
+  public UnsupportedFileSystemException(final String message) {
 super(message);
   }
 }

Added: 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java?rev=1032730&view=auto
==
--- 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.j

svn commit: r918309 [1/2] - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/ipc/

2010-03-02 Thread sradia
Author: sradia
Date: Wed Mar  3 02:48:49 2010
New Revision: 918309

URL: http://svn.apache.org/viewvc?rev=918309&view=rev
Log:
   HADOOP-6537 Declare more detailed exceptions in FileContext and 
AbstractFileSystem
   (Suresh Srinivas via Sanjay Radia)

Added:

hadoop/common/trunk/src/java/org/apache/hadoop/HadoopIllegalArgumentException.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/InvalidPathException.java

hadoop/common/trunk/src/java/org/apache/hadoop/fs/UnsupportedFileSystemException.java
hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RpcClientException.java
hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RpcException.java
hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RpcServerException.java

hadoop/common/trunk/src/java/org/apache/hadoop/ipc/UnexpectedServerException.java
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileContext.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=918309&r1=918308&r2=918309&view=diff
==
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Wed Mar  3 02:48:49 2010
@@ -185,6 +185,9 @@
HADOOP-6599  Split existing RpcMetrics into RpcMetrics & RpcDetailedMetrics.
(Suresh Srinivas via Sanjay Radia)
 
+   HADOOP-6537 Declare more detailed exceptions in FileContext and 
AbstractFileSystem
+   (Suresh Srinivas via Sanjay Radia)
+
   OPTIMIZATIONS
 
 HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).

Added: 
hadoop/common/trunk/src/java/org/apache/hadoop/HadoopIllegalArgumentException.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/HadoopIllegalArgumentException.java?rev=918309&view=auto
==
--- 
hadoop/common/trunk/src/java/org/apache/hadoop/HadoopIllegalArgumentException.java
 (added)
+++ 
hadoop/common/trunk/src/java/org/apache/hadoop/HadoopIllegalArgumentException.java
 Wed Mar  3 02:48:49 2010
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop;
+
+/**
+ * Indicates that a method has been passed an illegal or invalid argument. This
+ * exception is thrown instead of IllegalArgumentException to differentiate the
+ * exception thrown in the Hadoop implementation from the one thrown in the JDK.
+ */
+public class HadoopIllegalArgumentException extends IllegalArgumentException {
+  private static final long serialVersionUID = 1L;
+  
+  /**
+   * Constructs exception with the specified detail message. 
+   * @param message detailed message.
+   */
+  public HadoopIllegalArgumentException(final String message) {
+super(message);
+  }
+}

Modified: 
hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=918309&r1=918308&r2=918309&view=diff
==
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java 
(original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java 
Wed Mar  3 02:48:49 2010
@@ -31,6 +31,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -38,29 +39,31 @@
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.InvalidPathException;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.Progressable;
 
 /**
- * This class provides an interface for implem

svn commit: r917737 - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/ipc/ src/java/org/apache/hadoop/ipc/metrics/ src/test/core/org/apache/hadoop/ipc/

2010-03-01 Thread sradia
Author: sradia
Date: Mon Mar  1 21:36:23 2010
New Revision: 917737

URL: http://svn.apache.org/viewvc?rev=917737&view=rev
Log:
HADOOP-6599  Split existing RpcMetrics into RpcMetrics & RpcDetailedMetrics.
   (Suresh Srinivas via Sanjay Radia)


Added:

hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java

hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java

hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java
hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=917737&r1=917736&r2=917737&view=diff
==
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Mon Mar  1 21:36:23 2010
@@ -1,4 +1,5 @@
 Hadoop Change Log
+# Add directory level at the storage directory
 
 Trunk (unreleased changes)
 
@@ -181,6 +182,9 @@
 HADOOP-6589. Provide better error messages when RPC authentication fails.
 (Kan Zhang via omalley)
 
+   HADOOP-6599  Split existing RpcMetrics into RpcMetrics & RpcDetailedMetrics.
+   (Suresh Srinivas via Sanjay Radia)
+
   OPTIMIZATIONS
 
 HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=917737&r1=917736&r2=917737&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java Mon Mar  1 
21:36:23 2010
@@ -64,6 +64,7 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SaslRpcServer;
@@ -172,7 +173,8 @@
   // connections to nuke
   //during a cleanup
   
-  protected RpcMetrics  rpcMetrics;
+  protected RpcMetrics rpcMetrics;
+  protected RpcDetailedMetrics rpcDetailedMetrics;
   
   private Configuration conf;
   private SecretManager secretManager;
@@ -1268,8 +1270,9 @@
 // its own message ordering.
 setupResponse(buf, call, (error == null) ? Status.SUCCESS
 : Status.ERROR, value, errorClass, error);
-// Discard the large buf and reset it back to
-// smaller size to freeup heap
+
+// Discard the large buf and reset it back to smaller size 
+// to free up heap
 if (buf.size() > maxRespSize) {
   LOG.warn("Large response size " + buf.size() + " for call "
   + call.toString());
@@ -1336,6 +1339,8 @@
 this.port = listener.getAddress().getPort();
 this.rpcMetrics = new RpcMetrics(serverName,
   Integer.toString(this.port), this);
+this.rpcDetailedMetrics = new RpcDetailedMetrics(serverName,
+Integer.toString(this.port));
 this.tcpNoDelay = conf.getBoolean("ipc.server.tcpnodelay", false);
 
 
@@ -1450,6 +1455,9 @@
 if (this.rpcMetrics != null) {
   this.rpcMetrics.shutdown();
 }
+if (this.rpcDetailedMetrics != null) {
+  this.rpcDetailedMetrics.shutdown();
+}
   }
 
   /** Wait for the server to be stopped.
@@ -1540,11 +1548,15 @@
*
* @see WritableByteChannel#write(ByteBuffer)
*/
-  private static int channelWrite(WritableByteChannel channel, 
-  ByteBuffer buffer) throws IOException {
+  private int channelWrite(WritableByteChannel channel, 
+   ByteBuffer buffer) throws IOException {
 
-return (buffer.remaining() <= NIO_BUFFER_LIMIT) ?
-   channel.write(buffer) : channelIO(null, channel, buffer);
+int count =  (buffer.remaining() <= NIO_BUFFER_LIMIT) ?
+ channel.write(buffer) : channelIO(null, channel, buffer);
+if (count > 0) {
+  rpcMetrics.sentBytes.inc(count);
+}
+return count;
   }
   
   
@@ -1556,11 +1568,15 @@
* 
* @see ReadableByteChannel#read(ByteBuffer)
*/
-  private static int channelRead(ReadableByt

svn commit: r910706 [2/2] - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/local/ src/java/org/apache/hadoop/util/ src/test/core/org/apache/hadoop/fs/

2010-02-16 Thread sradia
Added: 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java?rev=910706&view=auto
==============================================================================
--- 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java
 (added)
+++ 
hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java
 Tue Feb 16 21:43:30 2010
@@ -0,0 +1,818 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import java.io.*;
+import java.net.URI;
+import java.util.Random;
+import java.util.EnumSet;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Options.CreateOpts;
+import org.apache.hadoop.fs.Options.Rename;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.CreateFlag;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FSDataInputStream;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+import org.junit.Before;
+import org.junit.After;
+
+/**
+ * Test symbolic links using FileContext.
+ */
+public abstract class FileContextSymlinkBaseTest {
+  static final long seed = 0xDEADBEEFL;
+  static final int  blockSize =  8192;
+  static final int  fileSize  = 16384;
+ 
+  protected static FileContext fc;
+  
+  abstract protected String getScheme();
+  abstract protected String testBaseDir1();
+  abstract protected String testBaseDir2();
+  abstract protected URI testURI();
+
+  protected static void createAndWriteFile(FileContext fc, Path p) 
+  throws IOException {
+FSDataOutputStream out;
+out = fc.create(p, EnumSet.of(CreateFlag.CREATE),
+CreateOpts.createParent(),
+CreateOpts.repFac((short) 1),
+CreateOpts.blockSize(blockSize));
+byte[] buf = new byte[fileSize];
+Random rand = new Random(seed);
+rand.nextBytes(buf);
+out.write(buf);
+out.close();
+  }
+  
+  protected static void createAndWriteFile(Path p) throws IOException {
+createAndWriteFile(fc, p);
+  }
+
+  protected void readFile(Path p) throws IOException {
+FSDataInputStream out = fc.open(p);
+byte[] actual = new byte[fileSize];
+out.readFully(actual);
+out.close();
+  }
+
+  protected void readFile(FileContext fc, Path p) throws IOException {
+FSDataInputStream out = fc.open(p);
+byte[] actual = new byte[fileSize];
+out.readFully(actual);
+out.close();
+  }
+  
+  protected void appendToFile(Path p) throws IOException {
+FSDataOutputStream out;
+out = fc.create(p, EnumSet.of(CreateFlag.APPEND));
+byte[] buf = new byte[fileSize];
+Random rand = new Random(seed);
+rand.nextBytes(buf);
+out.write(buf);
+out.close();
+  }
+  
+  @Before
+  public void setUp() throws Exception {
+fc.mkdir(new Path(testBaseDir1()), FileContext.DEFAULT_PERM, true);
+fc.mkdir(new Path(testBaseDir2()), FileContext.DEFAULT_PERM, true);
+  }
+  
+  @After
+  public void tearDown() throws Exception { 
+fc.delete(new Path(testBaseDir1()), true);
+fc.delete(new Path(testBaseDir2()), true);
+  } 
+  
+  @Test
+  /** The root is not a symlink */
+  public void testStatRoot() throws IOException {
+assertFalse(fc.getFileLinkStatus(new Path("/")).isSymlink());
+  }
+  
+  @Test
+  /** Test setWorkingDirectory resolves symlinks */
+  public void testSetWDResolvesLinks() throws IOException {
+Path dir   = new Path(testBaseDir1());
+Path linkToDir = new Path(testBaseDir1()+"/link");
+fc.createSymlink(dir, linkToDir, false);
+fc.setWorkingDirectory(linkToDir);
+// Local file system does not resolve symlinks, others do.
+if ("file".equals(getScheme())) {
+  assertEquals(linkToDir.getName(), fc.getWorkingDirectory().getName());
+} else {
+  assertEquals(dir.getName(), fc.getWorkingDirectory().getName());
+}
+  }
+  
+  @Test
+  /** Test create a dangling link */
+  public void testCreateDanglingLink() throws IOException {
+Path file = new Path("/noSuchFil