Author: todd
Date: Wed Apr  4 20:00:15 2012
New Revision: 1309576

URL: http://svn.apache.org/viewvc?rev=1309576&view=rev
Log:
Merge trunk into auto-HA branch

Resolved some trivial conflicts in NNHAServiceTarget

Modified:
    hadoop/common/branches/HDFS-3042/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-project/pom.xml
    
hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
    
hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

Propchange: hadoop/common/branches/HDFS-3042/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1309162-1309567

Modified: hadoop/common/branches/HDFS-3042/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-project/pom.xml?rev=1309576&r1=1309575&r2=1309576&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-project/pom.xml Wed Apr  4 20:00:15 2012
@@ -253,7 +253,7 @@
       <dependency>
         <groupId>com.google.guava</groupId>
         <artifactId>guava</artifactId>
-        <version>r09</version>
+        <version>11.0.2</version>
       </dependency>
       <dependency>
         <groupId>commons-cli</groupId>

Modified: hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java?rev=1309576&r1=1309575&r2=1309576&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java Wed Apr  4 20:00:15 2012
@@ -830,11 +830,18 @@ public class HadoopArchives implements T
         throw new IOException("Parent path not specified.");
       }
       parentPath = new Path(args[i+1]);
+      if (!parentPath.isAbsolute()) {
+        parentPath = parentPath.getFileSystem(getConf()).makeQualified(parentPath);
+      }
+
       i+=2;
       //read the rest of the paths
       for (; i < args.length; i++) {
         if (i == (args.length - 1)) {
           destPath = new Path(args[i]);
+          if (!destPath.isAbsolute()) {
+            destPath = destPath.getFileSystem(getConf()).makeQualified(destPath);
+          }
         }
         else {
           Path argPath = new Path(args[i]);

Modified: hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java?rev=1309576&r1=1309575&r2=1309576&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java Wed Apr  4 20:00:15 2012
@@ -103,7 +103,42 @@ public class TestHadoopArchives extends 
     super.tearDown();
   }
   
-  
+   
+  public void testRelativePath() throws Exception {
+    fs.delete(archivePath, true);
+
+    final Path sub1 = new Path(inputPath, "dir1");
+    fs.mkdirs(sub1);
+    createFile(sub1, "a", fs);
+    final Configuration conf = mapred.createJobConf();
+    final FsShell shell = new FsShell(conf);
+
+    final List<String> originalPaths = lsr(shell, "input");
+    System.out.println("originalPath: " + originalPaths);
+    final URI uri = fs.getUri();
+    final String prefix = "har://hdfs-" + uri.getHost() +":" + uri.getPort()
+        + archivePath.toUri().getPath() + Path.SEPARATOR;
+
+    {
+      final String harName = "foo.har";
+      final String[] args = {
+          "-archiveName",
+          harName,
+          "-p",
+          "input",
+          "*",
+          "archive"
+      };
+      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
+      final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
+      assertEquals(0, ToolRunner.run(har, args));
+
+      //compare results
+      final List<String> harPaths = lsr(shell, prefix + harName);
+      assertEquals(originalPaths, harPaths);
+    }
+  }
+
   public void testPathWithSpaces() throws Exception {
     fs.delete(archivePath, true);
 
@@ -170,8 +205,11 @@ public class TestHadoopArchives extends 
       System.setErr(oldErr);
     }
     System.out.println("lsr results:\n" + results);
+    String dirname = dir;
+    if (dir.lastIndexOf(Path.SEPARATOR) != -1 ) {
+      dirname = dir.substring(dir.lastIndexOf(Path.SEPARATOR));
+    }
 
-    final String dirname = dir.substring(dir.lastIndexOf(Path.SEPARATOR));
     final List<String> paths = new ArrayList<String>();
     for(StringTokenizer t = new StringTokenizer(results, "\n");
         t.hasMoreTokens(); ) {


Reply via email to