svn commit: r1565508 - in /hadoop/common/branches/branch-2/hadoop-common-project: hadoop-common/CHANGES.txt hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java

2014-02-06 Thread arp
Author: arp
Date: Fri Feb  7 01:14:11 2014
New Revision: 1565508

URL: http://svn.apache.org/r1565508
Log:
HADOOP-10330. Merging r1565507 from trunk to branch-2.

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565508&r1=1565507&r2=1565508&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Fri Feb  7 01:14:11 2014
@@ -27,6 +27,9 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10327. Trunk windows build broken after HDFS-5746.
 (Vinay via cnauroth)
 
+HADOOP-10330. TestFrameDecoder fails if it cannot bind port 12345.
+(Arpit Agarwal)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java?rev=1565508&r1=1565507&r2=1565508&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
 Fri Feb  7 01:14:11 2014
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertFal
 import static org.junit.Assert.assertTrue;
 
 import java.nio.ByteBuffer;
+import java.util.Random;
 
 import org.apache.hadoop.oncrpc.RpcUtil.RpcFrameDecoder;
 import org.apache.hadoop.oncrpc.security.CredentialsNone;
@@ -31,17 +32,17 @@ import org.jboss.netty.buffer.ByteBuffer
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelException;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestFrameDecoder {
 
-  private static int port = 12345; // some random server port
   private static int resultSize;
 
-  static void testRequest(XDR request) {
-SimpleTcpClient tcpClient = new SimpleTcpClient("localhost", port, request,
+  static void testRequest(XDR request, int serverPort) {
+SimpleTcpClient tcpClient = new SimpleTcpClient("localhost", serverPort, 
request,
 true);
 tcpClient.run();
   }
@@ -148,10 +149,25 @@ public class TestFrameDecoder {
   @Test
   public void testFrames() {
 
-RpcProgram program = new TestFrameDecoder.TestRpcProgram("TestRpcProgram",
-"localhost", port, 10, 1, 2);
-SimpleTcpServer tcpServer = new SimpleTcpServer(port, program, 1);
-tcpServer.run();
+Random rand = new Random();
+int serverPort = 30000 + rand.nextInt(10000);
+int retries = 10;// A few retries in case initial choice is in use.
+
+while (true) {
+  try {
+RpcProgram program = new 
TestFrameDecoder.TestRpcProgram("TestRpcProgram",
+"localhost", serverPort, 10, 1, 2);
+SimpleTcpServer tcpServer = new SimpleTcpServer(serverPort, program, 
1);
+tcpServer.run();
+break;  // Successfully bound a port, break out.
+  } catch (ChannelException ce) {
+if (retries-- > 0) {
+  serverPort += rand.nextInt(20); // Port in use? Try another.
+} else {
+  throw ce; // Out of retries.
+}
+  }
+}
 
 XDR xdrOut = createGetportMount();
 int headerSize = xdrOut.size();
@@ -161,7 +177,7 @@ public class TestFrameDecoder {
 int requestSize = xdrOut.size() - headerSize;
 
 // Send the request to the server
-testRequest(xdrOut);
+testRequest(xdrOut, serverPort);
 
 // Verify the server got the request with right size
 assertEquals(requestSize, resultSize);




svn commit: r1565507 - in /hadoop/common/trunk/hadoop-common-project: hadoop-common/CHANGES.txt hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java

2014-02-06 Thread arp
Author: arp
Date: Fri Feb  7 01:12:52 2014
New Revision: 1565507

URL: http://svn.apache.org/r1565507
Log:
HADOOP-10330. TestFrameDecoder fails if it cannot bind port 12345.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565507&r1=1565506&r2=1565507&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Feb 
 7 01:12:52 2014
@@ -325,6 +325,9 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10327. Trunk windows build broken after HDFS-5746.
 (Vinay via cnauroth)
 
+HADOOP-10330. TestFrameDecoder fails if it cannot bind port 12345.
+(Arpit Agarwal)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java?rev=1565507&r1=1565506&r2=1565507&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
 Fri Feb  7 01:12:52 2014
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertFal
 import static org.junit.Assert.assertTrue;
 
 import java.nio.ByteBuffer;
+import java.util.Random;
 
 import org.apache.hadoop.oncrpc.RpcUtil.RpcFrameDecoder;
 import org.apache.hadoop.oncrpc.security.CredentialsNone;
@@ -31,17 +32,17 @@ import org.jboss.netty.buffer.ByteBuffer
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelException;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestFrameDecoder {
 
-  private static int port = 12345; // some random server port
   private static int resultSize;
 
-  static void testRequest(XDR request) {
-SimpleTcpClient tcpClient = new SimpleTcpClient("localhost", port, request,
+  static void testRequest(XDR request, int serverPort) {
+SimpleTcpClient tcpClient = new SimpleTcpClient("localhost", serverPort, 
request,
 true);
 tcpClient.run();
   }
@@ -148,10 +149,25 @@ public class TestFrameDecoder {
   @Test
   public void testFrames() {
 
-RpcProgram program = new TestFrameDecoder.TestRpcProgram("TestRpcProgram",
-"localhost", port, 10, 1, 2);
-SimpleTcpServer tcpServer = new SimpleTcpServer(port, program, 1);
-tcpServer.run();
+Random rand = new Random();
+int serverPort = 30000 + rand.nextInt(10000);
+int retries = 10;// A few retries in case initial choice is in use.
+
+while (true) {
+  try {
+RpcProgram program = new 
TestFrameDecoder.TestRpcProgram("TestRpcProgram",
+"localhost", serverPort, 10, 1, 2);
+SimpleTcpServer tcpServer = new SimpleTcpServer(serverPort, program, 
1);
+tcpServer.run();
+break;  // Successfully bound a port, break out.
+  } catch (ChannelException ce) {
+if (retries-- > 0) {
+  serverPort += rand.nextInt(20); // Port in use? Try another.
+} else {
+  throw ce; // Out of retries.
+}
+  }
+}
 
 XDR xdrOut = createGetportMount();
 int headerSize = xdrOut.size();
@@ -161,7 +177,7 @@ public class TestFrameDecoder {
 int requestSize = xdrOut.size() - headerSize;
 
 // Send the request to the server
-testRequest(xdrOut);
+testRequest(xdrOut, serverPort);
 
 // Verify the server got the request with right size
 assertEquals(requestSize, resultSize);




svn commit: r1565519 - in /hadoop/common/branches/HDFS-5535/hadoop-common-project: hadoop-common/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/http/ hadoop-common/src/mai

2014-02-06 Thread szetszwo
Author: szetszwo
Date: Fri Feb  7 02:43:04 2014
New Revision: 1565519

URL: http://svn.apache.org/r1565519
Log:
Merge r1555021 through r1565516 from trunk.

Added:

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
  - copied unchanged from r1565516, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
Modified:

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt
   (contents, props changed)

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java

hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java

Modified: 
hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565519&r1=1565518&r2=1565519&view=diff
==
--- 
hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt
 Fri Feb  7 02:43:04 2014
@@ -113,6 +113,11 @@ Trunk (Unreleased)
 
 HADOOP-10177. Create CLI tools for managing keys. (Larry McCay via omalley)
 
+HADOOP-10244. TestKeyShell improperly tests the results of delete (Larry
+McCay via omalley)
+
+HADOOP-10325. Improve jenkins javadoc warnings from test-patch.sh (cmccabe)
+
   BUG FIXES
 
 HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -313,6 +318,15 @@ Release 2.4.0 - UNRELEASED
 
 HADOOP-10320. Javadoc in InterfaceStability.java lacks final .
 (René Nyffenegger via cnauroth)
+
+HADOOP-10085. CompositeService should allow adding services while being 
+inited. (Steve Loughran via kasha)
+
+HADOOP-10327. Trunk windows build broken after HDFS-5746.
+(Vinay via cnauroth)
+
+HADOOP-10330. TestFrameDecoder fails if it cannot bind port 12345.
+(Arpit Agarwal)
 
 Release 2.3.0 - UNRELEASED
 
@@ -685,6 +699,8 @@ Release 2.3.0 - UNRELEASED
 
 HADOOP-10311. Cleanup vendor names from the code base. (tucu)
 
+HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES

Propchange: 
hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1563385-1565516

Propchange: 
hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1563385-1565516

Modified: 
hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==
--- 
hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main

svn commit: r1565519 - in /hadoop/common/branches/HDFS-5535: ./ dev-support/test-patch.sh pom.xml

2014-02-06 Thread szetszwo
Author: szetszwo
Date: Fri Feb  7 02:43:04 2014
New Revision: 1565519

URL: http://svn.apache.org/r1565519
Log:
Merge r1555021 through r1565516 from trunk.

Modified:
hadoop/common/branches/HDFS-5535/   (props changed)
hadoop/common/branches/HDFS-5535/dev-support/test-patch.sh
hadoop/common/branches/HDFS-5535/pom.xml

Propchange: hadoop/common/branches/HDFS-5535/
--
  Merged /hadoop/common/trunk:r1563385-1565516

Modified: hadoop/common/branches/HDFS-5535/dev-support/test-patch.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/dev-support/test-patch.sh?rev=1565519&r1=1565518&r2=1565519&view=diff
==
--- hadoop/common/branches/HDFS-5535/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-5535/dev-support/test-patch.sh Fri Feb  7 
02:43:04 2014
@@ -300,6 +300,17 @@ prebuildWithoutPatch () {
 {color:red}-1 patch{color}.  Trunk compilation may be broken."
 return 1
   fi
+
+  echo "$MVN clean test javadoc:javadoc -DskipTests -Pdocs 
-D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavadocWarnings.txt 2>&1"
+  $MVN clean test javadoc:javadoc -DskipTests -Pdocs 
-D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavadocWarnings.txt 2>&1
+  if [[ $? != 0 ]] ; then
+echo "Trunk javadoc compilation is broken?"
+JIRA_COMMENT="$JIRA_COMMENT
+
+{color:red}-1 patch{color}.  Trunk compilation may be broken."
+return 1
+  fi
+
   return 0
 }
 
@@ -401,6 +412,11 @@ applyPatch () {
 }
 
 ###
+calculateJavadocWarnings() {
+WARNING_FILE="$1"
+RET=$(egrep "^[0-9]+ warnings$" "$WARNING_FILE" | awk '{sum+=$1} END 
{print sum}')
+}
+
 ### Check there are no javadoc warnings
 checkJavadocWarnings () {
   echo ""
@@ -420,24 +436,29 @@ checkJavadocWarnings () {
 (cd hadoop-common-project/hadoop-annotations; $MVN install > /dev/null 
2>&1)
   fi
   $MVN clean test javadoc:javadoc -DskipTests -Pdocs 
-D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
-  javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | 
$AWK '/Javadoc Warnings/,EOF' | $GREP warning | $AWK 'BEGIN {total = 0} {total 
+= 1} END {print total}'`
-  echo ""
-  echo ""
-  echo "There appear to be $javadocWarnings javadoc warnings generated by the 
patched build."
-
-  #There are 14 warnings that are caused by things that are caused by using 
sun internal APIs.
-  #There are 2 warnings that are caused by the Apache DS Dn class used in 
MiniKdc.
-  OK_JAVADOC_WARNINGS=16;
-  ### if current warnings greater than OK_JAVADOC_WARNINGS
-  if [[ $javadocWarnings -ne $OK_JAVADOC_WARNINGS ]] ; then
-JIRA_COMMENT="$JIRA_COMMENT
+  calculateJavadocWarnings "$PATCH_DIR/trunkJavadocWarnings.txt"
+  numTrunkJavadocWarnings=$RET
+  calculateJavadocWarnings "$PATCH_DIR/patchJavadocWarnings.txt"
+  numPatchJavadocWarnings=$RET
+  grep -i warning "$PATCH_DIR/trunkJavadocWarnings.txt" > 
"$PATCH_DIR/trunkJavadocWarningsFiltered.txt"
+  grep -i warning "$PATCH_DIR/patchJavadocWarnings.txt" > 
"$PATCH_DIR/patchJavadocWarningsFiltered.txt"
+  diff -u "$PATCH_DIR/trunkJavadocWarningsFiltered.txt" \
+  "$PATCH_DIR/patchJavadocWarningsFiltered.txt" > \
+  "$PATCH_DIR/diffJavadocWarnings.txt"
+  rm -f "$PATCH_DIR/trunkJavadocWarningsFiltered.txt" 
"$PATCH_DIR/patchJavadocWarningsFiltered.txt"
+  echo "There appear to be $numTrunkJavadocWarnings javadoc warnings before 
the patch and $numPatchJavadocWarnings javadoc warnings after applying the 
patch."
+  if [[ $numTrunkJavadocWarnings != "" && $numPatchJavadocWarnings != "" ]] ; 
then
+if [[ $numPatchJavadocWarnings -gt $numTrunkJavadocWarnings ]] ; then
+  JIRA_COMMENT="$JIRA_COMMENT
 
-{color:red}-1 javadoc{color}.  The javadoc tool appears to have generated 
`expr $(($javadocWarnings-$OK_JAVADOC_WARNINGS))` warning messages."
-return 1
+{color:red}-1 javadoc{color}.  The javadoc tool appears to have generated 
`expr $(($numPatchJavadocWarnings-$numTrunkJavadocWarnings))` warning messages.
+See $BUILD_URL/artifact/trunk/patchprocess/diffJavadocWarnings.txt for 
details."
+return 1
+fi
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-{color:green}+1 javadoc{color}.  The javadoc tool did not generate any 
warning messages."
+{color:green}+1 javadoc{color}.  There were no new javadoc warning 
messages."
   return 0
 }
 

Modified: hadoop/common/branches/HDFS-5535/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/pom.xml?rev=1565519&r1=1565518&r2=1565519&view=diff
==
--- hadoop/common/branches/HDFS-5535/pom.xml (original)
+++ hadoop/common/branches/HDFS-5535/pom.xml Fri Feb  7 02:43:04 2014
@@ -152,7 +152,7 @@ xsi:schemaLocation="http://maven.ap

svn commit: r1565515 [3/3] - in /hadoop/common/branches/HDFS-5698: ./ dev-support/ hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-distcp/src/main/java/org/apache

2014-02-06 Thread jing9
Modified: 
hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java?rev=1565515&r1=1565514&r2=1565515&view=diff
==
--- 
hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
 (original)
+++ 
hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
 Fri Feb  7 01:57:21 2014
@@ -54,6 +54,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
@@ -865,4 +866,10 @@ public class ResourceSchedulerWrapper im
   public RMContainer getRMContainer(ContainerId containerId) {
 return null;
   }
+
+  @Override
+  public String moveApplication(ApplicationId appId, String newQueue)
+  throws YarnException {
+return scheduler.moveApplication(appId, newQueue);
+  }
 }

Modified: hadoop/common/branches/HDFS-5698/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5698/pom.xml?rev=1565515&r1=1565514&r2=1565515&view=diff
==
--- hadoop/common/branches/HDFS-5698/pom.xml (original)
+++ hadoop/common/branches/HDFS-5698/pom.xml Fri Feb  7 01:57:21 2014
@@ -152,7 +152,7 @@ xsi:schemaLocation="http://maven.apache.
 
   org.apache.maven.plugins
   maven-site-plugin
-  3.2
+  3.3
   
 
   org.apache.maven.wagon
@@ -329,7 +329,7 @@ xsi:schemaLocation="http://maven.apache.
   
   
 maven-site-plugin
-3.0
+3.3
 
   
 attach-descriptor




svn commit: r1565515 [1/3] - in /hadoop/common/branches/HDFS-5698: ./ dev-support/ hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-distcp/src/main/java/org/apache

2014-02-06 Thread jing9
Author: jing9
Date: Fri Feb  7 01:57:21 2014
New Revision: 1565515

URL: http://svn.apache.org/r1565515
Log:
Merging r1562962 through r1565513 from trunk

Added:
hadoop/common/branches/HDFS-5698/dev-support/create-release.sh
  - copied unchanged from r1565513, 
hadoop/common/trunk/dev-support/create-release.sh
Modified:
hadoop/common/branches/HDFS-5698/   (props changed)
hadoop/common/branches/HDFS-5698/dev-support/test-patch.sh

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json

hadoop/common/branches/HDFS-5698/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
hadoop/common/branches/HDFS-5698/pom.xml

Propchange: hadoop/common/branches/HDFS-5698/
--
  Merged /hadoop/common/trunk:r1562962-1565513

Modified: hadoop/common/branches/HDFS-5698/dev-support/test-patch.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5698/dev-support/test-patch.sh?rev=1565515&r1=1565514&r2=1565515&view=diff
==
--- hadoop/common/branches/HDFS-5698/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-5698/dev-support/test-patch.sh Fri Feb  7 
01:57:21 2014
@@ -300,6 +300,17 @@ prebuildWithoutPatch () {
 {color:red}-1 patch{color}.  Trunk compilation may be broken."
 return 1
   fi
+
+  echo "$MVN clean test javadoc:javadoc -DskipTests -Pdocs 
-D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavadocWarnings.txt 2>&1"
+  $MVN clean test javadoc:javadoc -DskipTests -Pdocs 
-D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavadocWarnings.txt 2>&1
+  if [[ $? != 0 ]] ; then
+echo "Trunk javadoc compilation is broken?"
+JIRA_COMMENT="$JIRA_COMMENT
+
+{color:red}-1 patch{color}.  Trunk compilation may be broken."
+return 1
+  fi
+
   return 0
 }
 
@@ -401,6 +412,11 @@ applyPatch () {
 }
 
 ###
+calculateJavadocWarnings() {
+WARNING_FILE="$1"
+RET=$(egrep "^[0-9]+ warnings$" "$WARNING_FILE" | awk '{sum+=$1} END 
{print sum}')
+}
+
 ### Check there are no javadoc warnings
 checkJavadocWarnings () {
   echo ""
@@ -420,24 +436,29 @@ checkJavadocWarnings () {
 (cd hadoop-common-project/hadoop-annotations; $MVN install > /dev/null 
2>&1)
   fi
   $MVN clean test javadoc:javadoc -DskipTests -Pdocs 
-D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
-  javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | 
$AWK '/Javadoc Warnings/,EOF' | $GREP warning | $AWK 'BEGIN {total = 0} {total 
+= 1} END {print total}'`
-  echo ""
-  echo ""
-  echo "There appear to be $javadocWarnings javadoc warnings generated by the 
patched build."
-
-  #There are 12 warnings that are caused by things that are caused by using 
sun internal APIs.
-  #There are 2 warnings that are caused by the Apache DS Dn class used in 
MiniKdc.
-  OK_JAVADOC_WARNINGS=14;
-  ### if current warnings greater than OK_JAVADOC_WARNINGS
-  if [[ $javadocWarnings -ne $OK_JAVADOC_WARNINGS ]] ; then
-JIRA_COMMENT="$JIRA_COMMENT
+  calculateJavadocWarnings "$PATCH_DIR/trunkJavadocWarnings.txt"
+  numTrunkJavadocWarnings=$RET
+  calculateJavadocWarnings "$PATCH_DIR/patchJavadocWarnings.txt"
+  numPatchJavadocWarnings=$RET
+  grep -i warning "$PATCH_DIR/trunkJavadocWarnings.txt" > 
"$PATCH_DIR/trunkJavadocWarningsFiltered.txt"
+  grep -i warning "$PATCH_DIR/patchJavadocWarnings.txt" > 
"$PATCH_DIR/patchJavadocWarningsFiltered.txt"
+  diff -u "$PATCH_DIR/trunkJavadocWarningsFiltered.txt" \
+  "$PATCH_DIR/patchJavadocWarningsFiltered.txt" > \
+  "$PATCH_DIR/diffJavadocWarnings.txt"
+  rm -f "$PATCH_DIR/trunkJavadocWarningsFiltered.txt" 
"$PATCH_DIR/patchJavadocWarningsFiltered.txt"
+  echo "There appear to be $numTrunkJavado

svn commit: r1565474 - /hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

2014-02-06 Thread jlowe
Author: jlowe
Date: Thu Feb  6 23:09:41 2014
New Revision: 1565474

URL: http://svn.apache.org/r1565474
Log:
HADOOP-10112. har file listing doesn't work with wild card. Contributed by 
Brandon Li

Modified:

hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java?rev=1565474&r1=1565473&r2=1565474&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
 Thu Feb  6 23:09:41 2014
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.tools;
 
+import static org.junit.Assert.assertTrue;
+
 import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URI;
@@ -34,6 +37,7 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.JarFinder;
@@ -111,7 +115,14 @@ public class TestHadoopArchives {
   System.err.println(e);
 }
   }
-
+  static Path writeFile(FileSystem fs, Path f) throws IOException {
+DataOutputStream out = fs.create(f);
+out.writeBytes("dhruba: " + f);
+out.close();
+assertTrue(fs.exists(f));
+return f;
+  }
+  
   @Test
   public void testRelativePath() throws Exception {
 fs.delete(archivePath, true);
@@ -222,4 +233,64 @@ public class TestHadoopArchives {
 .println("lsr paths = " + paths.toString().replace(", ", ",\n  "));
 return paths;
   }
+  
+  // Make sure har file system works with wildcards
+  @Test
+  public void testHar() throws IOException {
+assertTrue("Not a HDFS: " + fs.getUri(),
+fs instanceof DistributedFileSystem);
+PrintStream psBackup = System.out;
+ByteArrayOutputStream out = new ByteArrayOutputStream();
+PrintStream psOut = new PrintStream(out);
+System.setOut(psOut);
+HadoopArchives archiveShell = new HadoopArchives(conf);
+archiveShell.setConf(conf);
+
+FsShell fsShell = new FsShell();
+fsShell.setConf(conf);
+
+try {
+  Path myPath = new Path("/test/dir");
+  assertTrue(fs.mkdirs(myPath));
+  assertTrue(fs.exists(myPath));
+  myPath = new Path("/test/dir/dir2");
+  assertTrue(fs.mkdirs(myPath));
+  assertTrue(fs.exists(myPath));
+  Path myFile = new Path("/test/dir/dir2/file");
+  writeFile(fs, myFile);
+  assertTrue(fs.exists(myFile));
+
+  String[] args = new String[5];
+  args[0] = "-archiveName";
+  args[1] = "foo.har";
+  args[2] = "-p";
+  args[3] = "/test/dir";
+  args[4] = "/test";
+  int val = -1;
+  try {
+val = archiveShell.run(args);
+  } catch (Exception e) {
+System.err.println("Exception raised from HadoopArchives.run "
++ e.getLocalizedMessage());
+  }
+  assertTrue(val == 0);
+
+  args = new String[2];
+  args[0] = "-ls";
+  args[1] = "har:///test/foo.har/d*";
+  val = -1;
+  try {
+val = fsShell.run(args);
+  } catch (Exception e) {
+System.err.println("Exception raised from HadoopArchives.run "
++ e.getLocalizedMessage());
+  }
+
+  String returnString = out.toString();
+  out.reset();
+  assertTrue(returnString.contains("har:///test/foo.har/dir2/file"));
+} finally {
+  System.setOut(psBackup);
+}
+  }
 }




svn commit: r1565474 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/HarFileSystem.java

2014-02-06 Thread jlowe
Author: jlowe
Date: Thu Feb  6 23:09:41 2014
New Revision: 1565474

URL: http://svn.apache.org/r1565474
Log:
HADOOP-10112. har file listing doesn't work with wild card. Contributed by 
Brandon Li

Modified:

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565474&r1=1565473&r2=1565474&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Feb  6 23:09:41 2014
@@ -21,6 +21,9 @@ Release 0.23.11 - UNRELEASED
 
 HADOOP-10146. Workaround JDK7 Process fd close bug (daryn)
 
+HADOOP-10112. har file listing doesn't work with wild card (Brandon Li via
+jlowe)
+
 Release 0.23.10 - 2013-12-09
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1565474&r1=1565473&r2=1565474&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Thu Feb  6 23:09:41 2014
@@ -614,15 +614,20 @@ public class HarFileSystem extends Filte
*/
   @Override
   public FileStatus getFileStatus(Path f) throws IOException {
-HarStatus hstatus = getFileHarStatus(f);
+Path p = makeQualified(f);
+if (p.toUri().getPath().length() < archivePath.toString().length()) {
+  // still in the source file system
+  return fs.getFileStatus(new Path(p.toUri().getPath()));
+}
+
+HarStatus hstatus = getFileHarStatus(p);
 return toFileStatus(hstatus, null);
   }
 
   private HarStatus getFileHarStatus(Path f) throws IOException {
 // get the fs DataInputStream for the underlying file
 // look up the index.
-Path p = makeQualified(f);
-Path harPath = getPathInHar(p);
+Path harPath = getPathInHar(f);
 if (harPath == null) {
   throw new IOException("Invalid file name: " + f + " in " + uri);
 }
@@ -716,6 +721,11 @@ public class HarFileSystem extends Filte
 // to the client
 List statuses = new ArrayList();
 Path tmpPath = makeQualified(f);
+if (tmpPath.toUri().getPath().length() < archivePath.toString().length()) {
+  // still in the source file system
+  return fs.listStatus(new Path(tmpPath.toUri().getPath()));
+}
+
 Path harPath = getPathInHar(tmpPath);
 HarStatus hstatus = metadata.archive.get(harPath);
 if (hstatus == null) {




svn commit: r1565456 - /hadoop/common/branches/branch-2.3/pom.xml

2014-02-06 Thread arp
Author: arp
Date: Thu Feb  6 21:59:43 2014
New Revision: 1565456

URL: http://svn.apache.org/r1565456
Log:
HADOOP-10273. Merging r1564639 and r1565454 from branch-2 to branch-2.3

Modified:
hadoop/common/branches/branch-2.3/pom.xml

Modified: hadoop/common/branches/branch-2.3/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.3/pom.xml?rev=1565456&r1=1565455&r2=1565456&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.3/pom.xml (original)
+++ hadoop/common/branches/branch-2.3/pom.xml Thu Feb  6 21:59:43 2014
@@ -152,7 +152,7 @@ xsi:schemaLocation="http://maven.apache.
 
   org.apache.maven.plugins
   maven-site-plugin
-  3.2
+  3.3
   
 
   org.apache.maven.wagon
@@ -222,7 +222,7 @@ xsi:schemaLocation="http://maven.apache.
   
   
 maven-site-plugin
-3.0
+3.3
 
   
 attach-descriptor




svn commit: r1565456 - /hadoop/common/branches/branch-2.3/hadoop-common-project/hadoop-common/CHANGES.txt

2014-02-06 Thread arp
Author: arp
Date: Thu Feb  6 21:59:43 2014
New Revision: 1565456

URL: http://svn.apache.org/r1565456
Log:
HADOOP-10273. Merging r1564639 and r1565454 from branch-2 to branch-2.3

Modified:

hadoop/common/branches/branch-2.3/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2.3/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.3/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565456&r1=1565455&r2=1565456&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2.3/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-2.3/hadoop-common-project/hadoop-common/CHANGES.txt
 Thu Feb  6 21:59:43 2014
@@ -378,6 +378,8 @@ Release 2.3.0 - UNRELEASED
 
 HADOOP-10311. Cleanup vendor names from the code base. (tucu)
 
+HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES




svn commit: r1565454 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2014-02-06 Thread arp
Author: arp
Date: Thu Feb  6 21:57:22 2014
New Revision: 1565454

URL: http://svn.apache.org/r1565454
Log:
HADOOP-10273. Update CHANGES.txt to reflect new target version is 2.3 (merged 
r1565453 from trunk to branch-2)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565454&r1=1565453&r2=1565454&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Feb  6 21:57:22 2014
@@ -24,8 +24,6 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10085. CompositeService should allow adding services while being 
 inited. (Steve Loughran via kasha)
 
-HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
-
 HADOOP-10327. Trunk windows build broken after HDFS-5746.
 (Vinay via cnauroth)
 
@@ -407,6 +405,8 @@ Release 2.3.0 - UNRELEASED
 
 HADOOP-10311. Cleanup vendor names from the code base. (tucu)
 
+HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES




svn commit: r1565453 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2014-02-06 Thread arp
Author: arp
Date: Thu Feb  6 21:56:00 2014
New Revision: 1565453

URL: http://svn.apache.org/r1565453
Log:
HADOOP-10273. Update CHANGES.txt to reflect new target version is 2.3

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565453&r1=1565452&r2=1565453&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Feb 
 6 21:56:00 2014
@@ -322,8 +322,6 @@ Release 2.4.0 - UNRELEASED
 HADOOP-10085. CompositeService should allow adding services while being 
 inited. (Steve Loughran via kasha)
 
-HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
-
 HADOOP-10327. Trunk windows build broken after HDFS-5746.
 (Vinay via cnauroth)
 
@@ -698,6 +696,8 @@ Release 2.3.0 - UNRELEASED
 
 HADOOP-10311. Cleanup vendor names from the code base. (tucu)
 
+HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES




svn commit: r1565438 - /hadoop/common/branches/branch-2/

2014-02-06 Thread cmccabe
Author: cmccabe
Date: Thu Feb  6 21:12:11 2014
New Revision: 1565438

URL: http://svn.apache.org/r1565438
Log:
HDFS-4911. Reduce PeerCache timeout to be commensurate with 
dfs.datanode.socket.reuse.keepalive (cmccabe)

Modified:
hadoop/common/branches/branch-2/   (props changed)

Propchange: hadoop/common/branches/branch-2/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1565435




svn commit: r1565390 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c

2014-02-06 Thread cnauroth
Author: cnauroth
Date: Thu Feb  6 18:42:14 2014
New Revision: 1565390

URL: http://svn.apache.org/r1565390
Log:
HADOOP-10327. Merging change r1565389 from trunk to branch-2.

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565390&r1=1565389&r2=1565390&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Feb  6 18:42:14 2014
@@ -26,6 +26,9 @@ Release 2.4.0 - UNRELEASED
 
 HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
 
+HADOOP-10327. Trunk windows build broken after HDFS-5746.
+(Vinay via cnauroth)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1565390&r1=1565389&r2=1565390&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
 Thu Feb  6 18:42:14 2014
@@ -671,6 +671,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jobject jfd, jint jprot,
   jboolean jshared, jlong length)
 {
+#ifdef UNIX
   void *addr = 0;
   int prot, flags, fd;
   
@@ -684,18 +685,33 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 throw_ioe(env, errno);
   }
   return (jlong)(intptr_t)addr;
+#endif  //   UNIX
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+"The function POSIX.mmap() is not supported on Windows");
+  return NULL;
+#endif
 }
 
 JNIEXPORT void JNICALL 
 Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_munmap(
   JNIEnv *env, jclass clazz, jlong jaddr, jlong length)
 {
+#ifdef UNIX
   void *addr;
 
   addr = (void*)(intptr_t)jaddr;
   if (munmap(addr, length) < 0) {
 throw_ioe(env, errno);
   }
+#endif  //   UNIX
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+"The function POSIX.munmap() is not supported on Windows");
+  return NULL;
+#endif
 }
 
 
@@ -1050,4 +1066,3 @@ JNIEnv *env, jclass clazz)
 /**
  * vim: sw=2: ts=2: et:
  */
-




svn commit: r1565389 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c

2014-02-06 Thread cnauroth
Author: cnauroth
Date: Thu Feb  6 18:40:23 2014
New Revision: 1565389

URL: http://svn.apache.org/r1565389
Log:
HADOOP-10327. Trunk windows build broken after HDFS-5746. Contributed by Vinay.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565389&r1=1565388&r2=1565389&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Feb 
 6 18:40:23 2014
@@ -324,6 +324,9 @@ Release 2.4.0 - UNRELEASED
 
 HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
 
+HADOOP-10327. Trunk windows build broken after HDFS-5746.
+(Vinay via cnauroth)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1565389&r1=1565388&r2=1565389&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
 Thu Feb  6 18:40:23 2014
@@ -671,6 +671,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jobject jfd, jint jprot,
   jboolean jshared, jlong length)
 {
+#ifdef UNIX
   void *addr = 0;
   int prot, flags, fd;
   
@@ -684,18 +685,33 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 throw_ioe(env, errno);
   }
   return (jlong)(intptr_t)addr;
+#endif  //   UNIX
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+"The function POSIX.mmap() is not supported on Windows");
+  return NULL;
+#endif
 }
 
 JNIEXPORT void JNICALL 
 Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_munmap(
   JNIEnv *env, jclass clazz, jlong jaddr, jlong length)
 {
+#ifdef UNIX
   void *addr;
 
   addr = (void*)(intptr_t)jaddr;
   if (munmap(addr, length) < 0) {
 throw_ioe(env, errno);
   }
+#endif  //   UNIX
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+"The function POSIX.munmap() is not supported on Windows");
+  return NULL;
+#endif
 }
 
 
@@ -1050,4 +1066,3 @@ JNIEnv *env, jclass clazz)
 /**
  * vim: sw=2: ts=2: et:
  */
-