Author: tgraves
Date: Tue Apr 10 22:09:00 2012
New Revision: 1312018

URL: http://svn.apache.org/viewvc?rev=1312018&view=rev
Log:
MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar (Devaraj K via tgraves)

Added:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java
      - copied, changed from r1312013, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java
Removed:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java
Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1312018&r1=1312017&r2=1312018&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Tue Apr 10 22:09:00 2012
@@ -229,6 +229,9 @@ Release 2.0.0 - UNRELEASED
 
     MAPREDUCE-4076. Stream job fails with ZipException when use yarn jar
     command (Devaraj K via bobby)
+ 
+    MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar
+    (Devaraj K via tgraves)
 
 Release 0.23.3 - UNRELEASED
 

Copied: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java (from r1312013, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java)
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java?p2=hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java&p1=hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java&r1=1312013&r2=1312018&rev=1312018&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java Tue Apr 10 22:09:00 2012
@@ -16,25 +16,24 @@
  * limitations under the License.
  */
 
-package testjar;
+package org.apache.hadoop.util;
 
 import java.io.FileOutputStream;
 import java.io.IOException;
 
 /**
- * A simple Hello class that is called from TestRunJar 
- *
+ * A simple Hello class that is called from TestRunJar
+ * 
  */
 public class Hello {
-  public static void main(String[] args){
+  public static void main(String[] args) {
     try {
       System.out.println("Creating file" + args[0]);
       FileOutputStream fstream = new FileOutputStream(args[0]);
       fstream.write("Hello Hadoopers".getBytes());
       fstream.close();
-    } 
-    catch (IOException e) {
-      //do nothing
+    } catch (IOException e) {
+      // do nothing
     }
   }
 }

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java?rev=1312018&r1=1312017&r2=1312018&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java Tue Apr 10 22:09:00 2012
@@ -18,34 +18,63 @@
 package org.apache.hadoop.util;
 
 
+import java.io.BufferedInputStream;
 import java.io.File;
-import org.apache.hadoop.fs.Path;
-import org.junit.Ignore;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
 
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
 * A test for the RunJar class.
  */
-@Ignore
-public class TestRunJar extends TestCase {
-  
+public class TestRunJar {
+
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
-  
+
+  private static final String TEST_JAR_NAME = "testjar.jar";
+  private static final String CLASS_NAME = "Hello.class";
+
+  @Test
   public void testRunjar() throws Throwable {
-  
-   File outFile = new File(TEST_ROOT_DIR, "out");
-     // delete if output file already exists.
+    File outFile = new File(TEST_ROOT_DIR, "out");
+    // delete if output file already exists.
     if (outFile.exists()) {
       outFile.delete();
     }
-    
+    File makeTestJar = makeTestJar();
+
     String[] args = new String[3];
-    args[0] = "build/test/mapred/testjar/testjob.jar";
-    args[1] = "testjar.Hello";
+    args[0] = makeTestJar.getAbsolutePath();
+    args[1] = "org.apache.hadoop.util.Hello";
     args[2] = outFile.toString();
     RunJar.main(args);
-    assertTrue("RunJar failed", outFile.exists());
+    Assert.assertTrue("RunJar failed", outFile.exists());
+  }
+
+  private File makeTestJar() throws IOException {
+    File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+    JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+    InputStream entryInputStream = this.getClass().getResourceAsStream(
+        CLASS_NAME);
+    ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+    jstream.putNextEntry(entry);
+    BufferedInputStream bufInputStream = new BufferedInputStream(
+        entryInputStream, 2048);
+    int count;
+    byte[] data = new byte[2048];
+    while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+      jstream.write(data, 0, count);
+    }
+    jstream.closeEntry();
+    jstream.close();
+
+    return jarFile;
   }
-}
+}
\ No newline at end of file

