Author: jeagles
Date: Fri Aug  3 20:29:29 2012
New Revision: 1369197

URL: http://svn.apache.org/viewvc?rev=1369197&view=rev
Log:
MAPREDUCE-4503. Should throw InvalidJobConfException if duplicates found in 
cacheArchives or cacheFiles (Robert Evans via jeagles)

Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
    
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1369197&r1=1369196&r2=1369197&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Fri Aug  3 
20:29:29 2012
@@ -795,6 +795,9 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-4504. SortValidator writes to wrong directory (Robert Evans 
     via tgraves)
 
+    MAPREDUCE-4503. Should throw InvalidJobConfException if duplicates found in
+    cacheArchives or cacheFiles (Robert Evans via jeagles)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1369197&r1=1369196&r2=1369197&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
 (original)
+++ 
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
 Fri Aug  3 20:29:29 2012
@@ -35,6 +35,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -56,6 +57,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Helper class for MR applications
@@ -264,6 +266,13 @@ public class MRApps extends Apps {
         DistributedCache.getFileClassPaths(conf));
   }
 
+  /**
+   * Returns a human-readable label for a cached resource — the kind of cache
+   * entry plus the configuration key it came from — for use in error messages.
+   */
+  private static String getResourceDescription(LocalResourceType type) {
+    return (type == LocalResourceType.ARCHIVE)
+        ? "cache archive (" + MRJobConfig.CACHE_ARCHIVES + ") "
+        : "cache file (" + MRJobConfig.CACHE_FILES + ") ";
+  }
+  
   // TODO - Move this to MR!
   // Use TaskDistributedCacheManager.CacheFiles.makeCacheFiles(URI[], 
   // long[], boolean[], Path[], FileType)
@@ -309,6 +318,13 @@ public class MRApps extends Apps {
           throw new IllegalArgumentException("Resource name must be relative");
         }
         String linkName = name.toUri().getPath();
+        LocalResource orig = localResources.get(linkName);
+        if(orig != null && !orig.getResource().equals(
+            ConverterUtils.getYarnUrlFromURI(p.toUri()))) {
+          throw new InvalidJobConfException(
+              getResourceDescription(orig.getType()) + orig.getResource() + 
+              " conflicts with " + getResourceDescription(type) + u);
+        }
         localResources.put(
             linkName,
             BuilderUtils.newLocalResource(

Modified: 
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java?rev=1369197&r1=1369196&r2=1369197&view=diff
==============================================================================
--- 
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
 (original)
+++ 
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
 Fri Aug  3 20:29:29 2012
@@ -19,25 +19,33 @@
 package org.apache.hadoop.mapreduce.v2.util;
 
 import java.io.IOException;
+import java.net.URI;
 import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
 import org.junit.Test;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 public class TestMRApps {
 
@@ -166,5 +174,122 @@ public class TestMRApps {
     assertNotSame("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking 
effect!",
       env_str.indexOf("$PWD:job.jar"), 0);
   }
-
+  
+  /** An empty Configuration must produce no localized resources. */
+  @Test
+  public void testSetupDistributedCacheEmpty() throws IOException {
+    Configuration jobConf = new Configuration();
+    Map<String, LocalResource> resources = new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(jobConf, resources);
+    assertTrue("Empty Config did not produce an empty list of resources",
+        resources.isEmpty());
+  }
+  
+  // A cache archive and a cache file that resolve to the same link name
+  // ("something", from the URI fragment) but different URIs must be rejected
+  // with InvalidJobConfException by MRApps.setupDistributedCache.
+  @SuppressWarnings("deprecation")
+  @Test(expected = InvalidJobConfException.class)
+  public void testSetupDistributedCacheConflicts() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+    
+    // Unwrap the raw Mockito mock behind MockFileSystem so resolvePath can be stubbed.
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+    
+    // Different paths, identical "#something" fragment => identical link name.
+    URI archive = new URI("mockfs://mock/tmp/something.zip#something");
+    Path archivePath = new Path(archive);
+    URI file = new URI("mockfs://mock/tmp/something.txt#something");
+    Path filePath = new Path(file);
+    
+    // Stub resolution to the identity so no real filesystem is touched.
+    when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+    
+    // Register the archive plus the metadata setupDistributedCache requires.
+    DistributedCache.addCacheArchive(archive, conf);
+    conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_VISIBILITIES, "true");
+    // Register the conflicting file the same way.
+    DistributedCache.addCacheFile(file, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
+    Map<String, LocalResource> localResources = 
+      new HashMap<String, LocalResource>();
+    // Expected to throw InvalidJobConfException (see @Test above).
+    MRApps.setupDistributedCache(conf, localResources);
+  }
+  
+  // Two cache *files* with different URIs but the same "#something" link name
+  // must be rejected with InvalidJobConfException.
+  @SuppressWarnings("deprecation")
+  @Test(expected = InvalidJobConfException.class)
+  public void testSetupDistributedCacheConflictsFiles() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+    
+    // Unwrap the raw Mockito mock behind MockFileSystem so resolvePath can be stubbed.
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+    
+    URI file = new URI("mockfs://mock/tmp/something.zip#something");
+    Path filePath = new Path(file);
+    URI file2 = new URI("mockfs://mock/tmp/something.txt#something");
+    // BUGFIX: was "new Path(file)", which aliased filePath and left the
+    // second file's path without a resolvePath stub.
+    Path file2Path = new Path(file2);
+    
+    // Stub resolution to the identity so no real filesystem is touched.
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+    when(mockFs.resolvePath(file2Path)).thenReturn(file2Path);
+    
+    // Register both files plus the metadata setupDistributedCache requires.
+    DistributedCache.addCacheFile(file, conf);
+    DistributedCache.addCacheFile(file2, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "10,11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "10,11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true,true");
+    Map<String, LocalResource> localResources = 
+      new HashMap<String, LocalResource>();
+    // Expected to throw InvalidJobConfException (see @Test above).
+    MRApps.setupDistributedCache(conf, localResources);
+  }
+  
+  // Happy path: one archive and one file with distinct link names are both
+  // localized, with size/timestamp/type carried through from the conf.
+  @SuppressWarnings("deprecation")
+  @Test
+  public void testSetupDistributedCache() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+    
+    // Unwrap the raw Mockito mock behind MockFileSystem so resolvePath can be stubbed.
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+    
+    // No fragment on the archive => link name defaults to "something.zip";
+    // the file's "#something" fragment gives it a distinct link name.
+    URI archive = new URI("mockfs://mock/tmp/something.zip");
+    Path archivePath = new Path(archive);
+    URI file = new URI("mockfs://mock/tmp/something.txt#something");
+    Path filePath = new Path(file);
+    
+    // Stub resolution to the identity so no real filesystem is touched.
+    when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+    
+    DistributedCache.addCacheArchive(archive, conf);
+    conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_VISIBILITIES, "true");
+    DistributedCache.addCacheFile(file, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
+    Map<String, LocalResource> localResources = 
+      new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+    assertEquals(2, localResources.size());
+    // Archive entry: size/timestamp 10, type ARCHIVE.
+    LocalResource lr = localResources.get("something.zip");
+    assertNotNull(lr);
+    assertEquals(10l, lr.getSize());
+    assertEquals(10l, lr.getTimestamp());
+    assertEquals(LocalResourceType.ARCHIVE, lr.getType());
+    // File entry keyed by its fragment: size/timestamp 11, type FILE.
+    lr = localResources.get("something");
+    assertNotNull(lr);
+    assertEquals(11l, lr.getSize());
+    assertEquals(11l, lr.getTimestamp());
+    assertEquals(LocalResourceType.FILE, lr.getType());
+  }
+  
+  /**
+   * FilterFileSystem wrapping a Mockito mock so tests can stub resolvePath().
+   * initialize() is overridden to a no-op because the mock needs no setup.
+   */
+  static class MockFileSystem extends FilterFileSystem {
+    MockFileSystem() {
+      super(mock(FileSystem.class));
+    }
+    @Override
+    public void initialize(URI name, Configuration conf) throws IOException {}
+  }
+  
 }


Reply via email to