Repository: hadoop
Updated Branches:
  refs/heads/branch-2 42d312663 -> 2b1f066d2


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
index 5a8aeda..2e40f72 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
@@ -18,17 +18,12 @@
 
 package org.apache.hadoop.mapreduce.lib.output;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.Random;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.DoubleWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -43,22 +38,16 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
 
-import java.io.IOException;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import junit.framework.TestCase;
+import org.apache.commons.logging.*;
 
-public class TestMRSequenceFileAsBinaryOutputFormat {
+public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
   private static final Log LOG =
     LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName());
 
   private static final int RECORDS = 10000;
-
-  @Test
+  
   public void testBinary() throws IOException, InterruptedException {
     Configuration conf = new Configuration();
     Job job = Job.getInstance(conf);
@@ -155,8 +144,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat {
     assertEquals("Some records not found", RECORDS, count);
   }
 
-  @Test
-  public void testSequenceOutputClassDefaultsToMapRedOutputClass()
+  public void testSequenceOutputClassDefaultsToMapRedOutputClass() 
          throws IOException {
     Job job = Job.getInstance();
     // Setting Random class to test getSequenceFileOutput{Key,Value}Class
@@ -184,8 +172,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat {
       SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job));
   }
 
-  @Test
-  public void testcheckOutputSpecsForbidRecordCompression()
+  public void testcheckOutputSpecsForbidRecordCompression() 
       throws IOException {
     Job job = Job.getInstance();
     FileSystem fs = FileSystem.getLocal(job.getConfiguration());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
index f83bc11..7be538e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
@@ -22,14 +22,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BinaryComparable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
 
-public class TestBinaryPartitioner {
+public class TestBinaryPartitioner extends TestCase {
 
-  @Test
   public void testDefaultOffsets() {
     Configuration conf = new Configuration();
     BinaryPartitioner<?> partitioner = 
@@ -53,8 +50,7 @@ public class TestBinaryPartitioner {
     partition2 = partitioner.getPartition(key2, null, 10);
     assertTrue(partition1 != partition2);
   }
-
-  @Test
+  
   public void testCustomOffsets() {
     Configuration conf = new Configuration();
     BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 }); 
@@ -79,8 +75,7 @@ public class TestBinaryPartitioner {
     partition2 = partitioner.getPartition(key2, null, 10);
     assertEquals(partition1, partition2);
   }
-
-  @Test
+  
   public void testLowerBound() {
     Configuration conf = new Configuration();
     BinaryPartitioner.setLeftOffset(conf, 0);
@@ -92,8 +87,7 @@ public class TestBinaryPartitioner {
     int partition2 = partitioner.getPartition(key2, null, 10);
     assertTrue(partition1 != partition2);
   }
-
-  @Test
+  
   public void testUpperBound() {
     Configuration conf = new Configuration();
     BinaryPartitioner.setRightOffset(conf, 4);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
index 4d05d13..6bad846 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
@@ -19,17 +19,14 @@ package org.apache.hadoop.mapreduce.lib.partition;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
 
-public class TestKeyFieldHelper {
+public class TestKeyFieldHelper extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class);
   /**
    * Test is key-field-helper's parse option.
    */
-  @Test
   public void testparseOption() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");
@@ -215,7 +212,6 @@ public class TestKeyFieldHelper {
   /**
    * Test is key-field-helper's getWordLengths.
    */
-  @Test
   public void testGetWordLengths() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");
@@ -274,7 +270,6 @@ public class TestKeyFieldHelper {
   /**
    * Test is key-field-helper's getStartOffset/getEndOffset.
    */
-  @Test
   public void testgetStartEndOffset() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
index 00b415f..9c2fb48 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
@@ -19,16 +19,14 @@ package org.apache.hadoop.mapreduce.lib.partition;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import junit.framework.TestCase;
 
-public class TestMRKeyFieldBasedPartitioner {
+public class TestMRKeyFieldBasedPartitioner extends TestCase {
 
   /**
    * Test is key-field-based partitioned works with empty key.
    */
-  @Test
   public void testEmptyKey() throws Exception {
     int numReducers = 10;
     KeyFieldBasedPartitioner<Text, Text> kfbp = 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
index cc217da..a3cd18c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -33,11 +35,8 @@ import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
 
-public class TestTotalOrderPartitioner {
+public class TestTotalOrderPartitioner extends TestCase {
 
   private static final Text[] splitStrings = new Text[] {
     // -inf            // 0
@@ -100,7 +99,6 @@ public class TestTotalOrderPartitioner {
     return p;
   }
 
-  @Test
   public void testTotalOrderMemCmp() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
@@ -120,7 +118,6 @@ public class TestTotalOrderPartitioner {
     }
   }
 
-  @Test
   public void testTotalOrderBinarySearch() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
@@ -153,7 +150,6 @@ public class TestTotalOrderPartitioner {
     }
   }
 
-  @Test
   public void testTotalOrderCustomComparator() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
index 07b5d8b..e1849a3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.util;
 import java.io.File;
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -28,27 +30,20 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
-import org.junit.Before;
 import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 /**
  * A test for MRAsyncDiskService.
  */
-public class TestMRAsyncDiskService {
+public class TestMRAsyncDiskService extends TestCase {
 
   public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class);
   
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
   
-  @Before
-  public void setUp() {
+  @Override
+  protected void setUp() {
     FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
index f68cc83..aa769f8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.mapreduce.v2;
 
+import junit.framework.TestCase;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -28,25 +29,22 @@ import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
 
+import java.net.InetAddress;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.net.InetAddress;
 import java.security.PrivilegedExceptionAction;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-public class TestMiniMRProxyUser {
+public class TestMiniMRProxyUser extends TestCase {
 
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
-
-  @Before
-  public void setUp() throws Exception {
+    
+  protected void setUp() throws Exception {
+    super.setUp();
     if (System.getProperty("hadoop.log.dir") == null) {
       System.setProperty("hadoop.log.dir", "/tmp");
     }
@@ -93,14 +91,15 @@ public class TestMiniMRProxyUser {
     return mrCluster.createJobConf();
   }
   
-  @After
-  public void tearDown() throws Exception {
+  @Override
+  protected void tearDown() throws Exception {
     if (mrCluster != null) {
       mrCluster.shutdown();
     }
     if (dfsCluster != null) {
       dfsCluster.shutdown();
     }
+    super.tearDown();
   }
 
   private void mrRun() throws Exception {
@@ -126,13 +125,11 @@ public class TestMiniMRProxyUser {
     assertTrue(runJob.isComplete());
     assertTrue(runJob.isSuccessful());
   }
-
-  @Test
+    
   public void __testCurrentUser() throws Exception {
    mrRun();
   }  
 
-  @Test
   public void testValidProxyUser() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", 
UserGroupInformation.getLoginUser());
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
@@ -145,7 +142,6 @@ public class TestMiniMRProxyUser {
     });
   }
 
-  @Test
   public void ___testInvalidProxyUser() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.createProxyUser("u2", 
UserGroupInformation.getLoginUser());
     ugi.doAs(new PrivilegedExceptionAction<Void>() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
index e90c509..b6947f3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.mapreduce.v2;
 
+import junit.framework.TestCase;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -27,22 +28,17 @@ import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.net.InetAddress;
 
-import static org.junit.Assert.assertNull;
-
-public class TestNonExistentJob {
+public class TestNonExistentJob extends TestCase {
 
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
 
-  @Before
-  public void setUp() throws Exception {
+  protected void setUp() throws Exception {
+    super.setUp();
     if (System.getProperty("hadoop.log.dir") == null) {
       System.setProperty("hadoop.log.dir", "/tmp");
     }
@@ -82,17 +78,17 @@ public class TestNonExistentJob {
     return mrCluster.createJobConf();
   }
 
-  @After
-  public void tearDown() throws Exception {
+  @Override
+  protected void tearDown() throws Exception {
     if (mrCluster != null) {
       mrCluster.shutdown();
     }
     if (dfsCluster != null) {
       dfsCluster.shutdown();
     }
+    super.tearDown();
   }
 
-  @Test
   public void testGetInvalidJob() throws Exception {
     RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0"));
     assertNull(runJob);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b1f066d/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
index 860fb89..7b7901f 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
@@ -42,11 +42,6 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 public class TestStreamingBadRecords extends ClusterMapReduceTestCase
 {
@@ -73,8 +68,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
     utilTest.redirectIfAntJunit();
   }
 
-  @Before
-  public void setUp() throws Exception {
+  protected void setUp() throws Exception {
     Properties props = new Properties();
     props.setProperty(JTConfig.JT_RETIREJOBS, "false");
     props.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "false");
@@ -248,7 +242,6 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
   }
   */
 
-  @Test
   public void testNoOp() {
     // Added to avoid warnings when running this disabled test
   }
