Author: bobby
Date: Tue Mar 26 14:17:59 2013
New Revision: 1461149

URL: http://svn.apache.org/r1461149
Log:
svn merge -c 1461146 FIXES: MAPREDUCE-4875. coverage fixing for org.apache.hadoop.mapred (Aleksey Gorshkov via bobby)
Added:
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClock.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClock.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobInfo.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobInfo.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestOldMethodsJobID.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestOldMethodsJobID.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestQueue.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestQueue.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestSkipBadRecords.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestSkipBadRecords.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLog.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLog.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLogAppender.java
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLogAppender.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/resources/mapred-queues.xml
      - copied unchanged from r1461146, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/resources/mapred-queues.xml
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java
      - copied unchanged from r1461146,
        hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java
Modified:
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt Tue Mar 26 14:17:59 2013
@@ -17,6 +17,9 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-4990. Construct debug strings conditionally in
     ShuffleHandler.Shuffle#sendMapOutput(). (kkambatl via tucu)
 
+    MAPREDUCE-4875. coverage fixing for org.apache.hadoop.mapred
+    (Aleksey Gorshkov via bobby)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java Tue Mar 26 14:17:59 2013
@@ -32,6 +32,8 @@ import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.util.Arrays;
 
+import junit.framework.Assert;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.TypeConverter;
@@ -78,7 +80,7 @@ public class TestTaskAttemptListenerImpl
     }
   }
 
-  @Test
+  @Test (timeout=5000)
   public void testGetTask() throws IOException {
     AppContext appCtx = mock(AppContext.class);
     JobTokenSecretManager secret = mock(JobTokenSecretManager.class);
@@ -136,9 +138,30 @@ public class TestTaskAttemptListenerImpl
     assertTrue(result.shouldDie);
 
     listener.stop();
+
+    // test JVMID
+    JVMId jvmid = JVMId.forName("jvm_001_002_m_004");
+    assertNotNull(jvmid);
+    try {
+      JVMId.forName("jvm_001_002_m_004_006");
+      Assert.fail();
+    } catch (IllegalArgumentException e) {
+      assertEquals(e.getMessage(),
+          "TaskId string : jvm_001_002_m_004_006 is not properly formed");
+    }
+
+  }
+
+  @Test (timeout=5000)
+  public void testJVMId() {
+
+    JVMId jvmid = new JVMId("test", 1, true, 2);
+    JVMId jvmid1 = JVMId.forName("jvm_test_0001_m_000002");
+    // test compare methot should be the same
+    assertEquals(0, jvmid.compareTo(jvmid1));
   }
 
-  @Test
+  @Test (timeout=10000)
   public void testGetMapCompletionEvents() throws IOException {
     TaskAttemptCompletionEvent[] empty = {};
     TaskAttemptCompletionEvent[] taskEvents = {
@@ -205,7 +228,7 @@ public class TestTaskAttemptListenerImpl
     return tce;
   }
 
-  @Test
+  @Test (timeout=1000)
   public void testCommitWindow() throws IOException {
     SystemClock clock = new SystemClock();

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java Tue Mar 26 14:17:59 2013
@@ -19,8 +19,6 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.DelayQueue;
 import java.util.concurrent.Delayed;
 import java.util.concurrent.TimeUnit;
@@ -39,62 +37,7 @@ public class JobEndNotifier {
   private static final Log LOG =
     LogFactory.getLog(JobEndNotifier.class.getName());
 
-  private static Thread thread;
-  private static volatile boolean running;
-  private static BlockingQueue<JobEndStatusInfo> queue =
-    new DelayQueue<JobEndStatusInfo>();
-
-  public static void startNotifier() {
-    running = true;
-    thread = new Thread(
-                        new Runnable() {
-                          public void run() {
-                            try {
-                              while (running) {
-                                sendNotification(queue.take());
-                              }
-                            }
-                            catch (InterruptedException irex) {
-                              if (running) {
-                                LOG.error("Thread has ended unexpectedly", irex);
-                              }
-                            }
-                          }
-
-                          private void sendNotification(JobEndStatusInfo notification) {
-                            try {
-                              int code = httpNotification(notification.getUri());
-                              if (code != 200) {
-                                throw new IOException("Invalid response status code: " + code);
-                              }
-                            }
-                            catch (IOException ioex) {
-                              LOG.error("Notification failure [" + notification + "]", ioex);
-                              if (notification.configureForRetry()) {
-                                try {
-                                  queue.put(notification);
-                                }
-                                catch (InterruptedException iex) {
-                                  LOG.error("Notification queuing error [" + notification + "]",
-                                            iex);
-                                }
-                              }
-                            }
-                            catch (Exception ex) {
-                              LOG.error("Notification failure [" + notification + "]", ex);
-                            }
-                          }
-
-                        }
-
-                        );
-    thread.start();
-  }
-
-  public static void stopNotifier() {
-    running = false;
-    thread.interrupt();
-  }
+
   private static JobEndStatusInfo createNotification(JobConf conf,
                                                      JobStatus status) {
@@ -118,18 +61,6 @@ public class JobEndNotifier {
     return notification;
   }
 
-  public static void registerNotification(JobConf jobConf, JobStatus status) {
-    JobEndStatusInfo notification = createNotification(jobConf, status);
-    if (notification != null) {
-      try {
-        queue.put(notification);
-      }
-      catch (InterruptedException iex) {
-        LOG.error("Notification queuing failure [" + notification + "]", iex);
-      }
-    }
-  }
-
   private static int httpNotification(String uri) throws IOException {
     URI url = new URI(uri, false);
     HttpClient m_client = new HttpClient();
@@ -194,10 +125,6 @@ public class JobEndNotifier {
       return retryInterval;
     }
 
-    public long getDelayTime() {
-      return delayTime;
-    }
-
     public boolean configureForRetry() {
       boolean retry = false;
       if (getRetryAttempts() > 0) {

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java Tue Mar 26 14:17:59 2013
@@ -40,7 +40,6 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
-import java.util.List;
 
 import java.net.URL;
@@ -487,73 +486,8 @@ public class QueueManager {
         new QueueAclsInfo[queueAclsInfolist.size()]);
   }
 
-  /**
-   * ONLY FOR TESTING - Do not use in production code.
-   * This method is used for setting up of leafQueues only.
-   * We are not setting the hierarchy here.
-   *
-   * @param queues
-   */
-  synchronized void setQueues(Queue[] queues) {
-    root.getChildren().clear();
-    leafQueues.clear();
-    allQueues.clear();
-
-    for (Queue queue : queues) {
-      root.addChild(queue);
-    }
-    //At this point we have root populated
-    //update data structures leafNodes.
-    leafQueues = getRoot().getLeafQueues();
-    allQueues.putAll(getRoot().getInnerQueues());
-    allQueues.putAll(leafQueues);
-  }
-
-  /**
-   * Return an array of {@link JobQueueInfo} objects for the root
-   * queues configured in the system.
-   * <p/>
-   * Root queues are queues that are at the top-most level in the
-   * hierarchy of queues in mapred-queues.xml, or they are the queues
-   * configured in the mapred.queue.names key in mapred-site.xml.
-   *
-   * @return array of JobQueueInfo objects for root level queues.
-   */
-
-  JobQueueInfo[] getRootQueues() {
-    List<JobQueueInfo> list = getRoot().getJobQueueInfo().getChildren();
-    return list.toArray(new JobQueueInfo[list.size()]);
-  }
-
-  /**
-   * Get the complete hierarchy of children for queue
-   * queueName
-   *
-   * @param queueName
-   * @return
-   */
-  JobQueueInfo[] getChildQueues(String queueName) {
-    List<JobQueueInfo> list =
-      allQueues.get(queueName).getJobQueueInfo().getChildren();
-    if (list != null) {
-      return list.toArray(new JobQueueInfo[list.size()]);
-    } else {
-      return new JobQueueInfo[0];
-    }
-  }
-
-  /**
-   * Used only for testing purposes .
-   * This method is unstable as refreshQueues would leave this
-   * data structure in unstable state.
-   *
-   * @param queueName
-   * @return
-   */
-  Queue getQueue(String queueName) {
-    return this.allQueues.get(queueName);
-  }
-
+
+
   /**
    * Return if ACLs are enabled for the Map/Reduce system
@@ -573,29 +507,7 @@ public class QueueManager {
     return root;
   }
 
-  /**
-   * Returns the specific queue ACL for the given queue.
-   * Returns null if the given queue does not exist or the acl is not
-   * configured for that queue.
-   * If acls are disabled(mapreduce.cluster.acls.enabled set to false), returns
-   * ACL with all users.
-   */
-  synchronized AccessControlList getQueueACL(String queueName,
-      QueueACL qACL) {
-    if (areAclsEnabled) {
-      Queue q = leafQueues.get(queueName);
-      if (q != null) {
-        return q.getAcls().get(toFullPropertyName(
-            queueName, qACL.getAclName()));
-      }
-      else {
-        LOG.warn("Queue " + queueName + " is not present.");
-        return null;
-      }
-    }
-    return new AccessControlList("*");
-  }
-
+
+
   /**
    * Dumps the configuration of hierarchy of queues
    * @param out the writer object to which dump is written

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java Tue Mar 26 14:17:59 2013
@@ -386,73 +386,6 @@ public class TaskLog {
     return conf.getLong(JobContext.TASK_USERLOG_LIMIT, 0) * 1024;
   }
 
-  /**
-   * Wrap a command in a shell to capture stdout and stderr to files.
-   * If the tailLength is 0, the entire output will be saved.
-   * @param cmd The command and the arguments that should be run
-   * @param stdoutFilename The filename that stdout should be saved to
-   * @param stderrFilename The filename that stderr should be saved to
-   * @param tailLength The length of the tail to be saved.
-   * @return the modified command that should be run
-   */
-  public static List<String> captureOutAndError(List<String> cmd,
-                                                File stdoutFilename,
-                                                File stderrFilename,
-                                                long tailLength
-                                               ) throws IOException {
-    return captureOutAndError(null, cmd, stdoutFilename,
-                              stderrFilename, tailLength, false);
-  }
-
-  /**
-   * Wrap a command in a shell to capture stdout and stderr to files.
-   * Setup commands such as setting memory limit can be passed which
-   * will be executed before exec.
-   * If the tailLength is 0, the entire output will be saved.
-   * @param setup The setup commands for the execed process.
-   * @param cmd The command and the arguments that should be run
-   * @param stdoutFilename The filename that stdout should be saved to
-   * @param stderrFilename The filename that stderr should be saved to
-   * @param tailLength The length of the tail to be saved.
-   * @return the modified command that should be run
-   */
-  public static List<String> captureOutAndError(List<String> setup,
-                                                List<String> cmd,
-                                                File stdoutFilename,
-                                                File stderrFilename,
-                                                long tailLength
-                                               ) throws IOException {
-    return captureOutAndError(setup, cmd, stdoutFilename, stderrFilename,
-                              tailLength, false);
-  }
-
-  /**
-   * Wrap a command in a shell to capture stdout and stderr to files.
-   * Setup commands such as setting memory limit can be passed which
-   * will be executed before exec.
-   * If the tailLength is 0, the entire output will be saved.
-   * @param setup The setup commands for the execed process.
-   * @param cmd The command and the arguments that should be run
-   * @param stdoutFilename The filename that stdout should be saved to
-   * @param stderrFilename The filename that stderr should be saved to
-   * @param tailLength The length of the tail to be saved.
-   * @param pidFileName The name of the pid-file. pid-file's usage is deprecated
-   * @return the modified command that should be run
-   *
-   * @deprecated pidFiles are no more used. Instead pid is exported to
-   *             env variable JVM_PID.
-   */
-  @Deprecated
-  public static List<String> captureOutAndError(List<String> setup,
-                                                List<String> cmd,
-                                                File stdoutFilename,
-                                                File stderrFilename,
-                                                long tailLength,
-                                                String pidFileName
-                                               ) throws IOException {
-    return captureOutAndError(setup, cmd, stdoutFilename, stderrFilename,
-                              tailLength, false);
-  }
 
   /**
    * Wrap a command in a shell to capture stdout and stderr to files.
@@ -607,25 +540,6 @@ public class TaskLog {
     return command.toString();
   }
 
-  /**
-   * Wrap a command in a shell to capture debug script's
-   * stdout and stderr to debugout.
-   * @param cmd The command and the arguments that should be run
-   * @param debugoutFilename The filename that stdout and stderr
-   *  should be saved to.
-   * @return the modified command that should be run
-   * @throws IOException
-   */
-  public static List<String> captureDebugOut(List<String> cmd,
-                                             File debugoutFilename
-                                            ) throws IOException {
-    String debugout = FileUtil.makeShellPath(debugoutFilename);
-    List<String> result = new ArrayList<String>(3);
-    result.add(bashCommand);
-    result.add("-c");
-    result.add(buildDebugScriptCommandLine(cmd, debugout));
-    return result;
-  }
 
   /**
    * Method to return the location of user log directory.

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java Tue Mar 26 14:17:59 2013
@@ -523,17 +523,5 @@ public abstract class TaskStatus impleme
     return (isMap) ? new MapTaskStatus() : new ReduceTaskStatus();
   }
 
-  static TaskStatus readTaskStatus(DataInput in) throws IOException {
-    boolean isMap = in.readBoolean();
-    TaskStatus taskStatus = createTaskStatus(isMap);
-    taskStatus.readFields(in);
-    return taskStatus;
-  }
-
-  static void writeTaskStatus(DataOutput out, TaskStatus taskStatus)
-  throws IOException {
-    out.writeBoolean(taskStatus.getIsMap());
-    taskStatus.write(out);
-  }
 }

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java Tue Mar 26 14:17:59 2013
@@ -26,12 +26,12 @@ import org.apache.hadoop.io.compress.Def
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.junit.Test;
 
-
+import static org.junit.Assert.*;
 
 public class TestIFile {
 
   @Test
   /**
-   * Create an IFile.Writer using GzipCodec since this codec does not
+   * Create an IFile.Writer using GzipCodec since this code does not
    * have a compressor when run via the tests (ie no native libraries).
    */
   public void testIFileWriterWithCodec() throws Exception {
@@ -63,5 +63,11 @@ public class TestIFile {
     IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, rfs,
         path, codec, null);
     reader.close();
+
+    // test check sum
+    byte[] ab= new byte[100];
+    int readed= reader.checksumIn.readWithChecksum(ab, 0, ab.length);
+    assertEquals( readed,reader.checksumIn.getChecksum().length);
+
   }
 }

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java Tue Mar 26 14:17:59 2013
@@ -21,13 +21,20 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
 import java.util.Arrays;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-
+/**
+ *
+ * test MultiFileSplit class
+ */
 public class TestMultiFileSplit extends TestCase{
 
     public void testReadWrite() throws Exception {
 
@@ -58,4 +65,26 @@ public class TestMultiFileSplit extends
     assertTrue(Arrays.equals(split.getLengths(), readSplit.getLengths()));
     System.out.println(split.toString());
   }
+
+  /**
+   * test method getLocations
+   * @throws IOException
+   */
+  public void testgetLocations() throws IOException{
+    JobConf job= new JobConf();
+
+    File tmpFile = File.createTempFile("test","txt");
+    tmpFile.createNewFile();
+    OutputStream out=new FileOutputStream(tmpFile);
+    out.write("tempfile".getBytes());
+    out.flush();
+    out.close();
+    Path[] path= {new Path(tmpFile.getAbsolutePath())};
+    long[] lengths = {100};
+
+    MultiFileSplit split = new MultiFileSplit(job,path,lengths);
+    String [] locations= split.getLocations();
+    assertTrue(locations.length==1);
+    assertEquals(locations[0], "localhost");
+  }
 }

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java Tue Mar 26 14:17:59 2013
@@ -18,24 +18,37 @@
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
+import java.io.PrintStream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo;
+import org.apache.hadoop.mapred.JobClient.NetworkedJob;
+import org.apache.hadoop.mapred.JobClient.TaskStatusFilter;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.yarn.YarnException;
 import org.junit.Test;
-
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 
 public class TestNetworkedJob {
   private static String TEST_ROOT_DIR = new File(System.getProperty(
@@ -44,8 +57,7 @@ public class TestNetworkedJob {
   private static Path inFile = new Path(testDir, "in");
   private static Path outDir = new Path(testDir, "out");
 
-  @SuppressWarnings("deprecation")
-  @Test
+  @Test (timeout=5000)
   public void testGetNullCounters() throws Exception {
     //mock creation
     Job mockJob = mock(Job.class);
@@ -57,7 +69,7 @@ public class TestNetworkedJob {
     verify(mockJob).getCounters();
   }
 
-  @Test
+  @Test (timeout=500000)
   public void testGetJobStatus() throws IOException, InterruptedException,
       ClassNotFoundException {
     MiniMRClientCluster mr = null;
@@ -105,4 +117,278 @@ public class TestNetworkedJob {
       }
     }
   }
+/**
+ * test JobConf
+ * @throws Exception
+ */
+  @SuppressWarnings( "deprecation" )
+  @Test (timeout=500000)
+  public void testNetworkedJob() throws Exception {
+    // mock creation
+    MiniMRClientCluster mr = null;
+    FileSystem fileSys = null;
+
+    try {
+      Configuration conf = new Configuration();
+      mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+
+      JobConf job = new JobConf(mr.getConfig());
+
+      fileSys = FileSystem.get(job);
+      fileSys.delete(testDir, true);
+      FSDataOutputStream out = fileSys.create(inFile, true);
+      out.writeBytes("This is a test file");
+      out.close();
+
+      FileInputFormat.setInputPaths(job, inFile);
+      FileOutputFormat.setOutputPath(job, outDir);
+
+      job.setInputFormat(TextInputFormat.class);
+      job.setOutputFormat(TextOutputFormat.class);
+
+      job.setMapperClass(IdentityMapper.class);
+      job.setReducerClass(IdentityReducer.class);
+      job.setNumReduceTasks(0);
+
+      JobClient client = new JobClient(mr.getConfig());
+
+      RunningJob rj = client.submitJob(job);
+      JobID jobId = rj.getID();
+      NetworkedJob runningJob = (NetworkedJob) client.getJob(jobId);
+      runningJob.setJobPriority(JobPriority.HIGH.name());
+      // test getters
+      assertTrue(runningJob.getConfiguration().toString()
+          .endsWith("0001/job.xml"));
+      assertEquals(runningJob.getID(), jobId);
+      assertEquals(runningJob.getJobID(), jobId.toString());
+      assertEquals(runningJob.getJobName(), "N/A");
+      assertTrue(runningJob.getJobFile().endsWith(
+          ".staging/" + runningJob.getJobID() + "/job.xml"));
+      assertTrue(runningJob.getTrackingURL().length() > 0);
+      assertTrue(runningJob.mapProgress() == 0.0f);
+      assertTrue(runningJob.reduceProgress() == 0.0f);
+      assertTrue(runningJob.cleanupProgress() == 0.0f);
+      assertTrue(runningJob.setupProgress() == 0.0f);
+
+      TaskCompletionEvent[] tce = runningJob.getTaskCompletionEvents(0);
+      assertEquals(tce.length, 0);
+
+      assertEquals(runningJob.getHistoryUrl(),"");
+      assertFalse(runningJob.isRetired());
+      assertEquals( runningJob.getFailureInfo(),"");
+      assertEquals(runningJob.getJobStatus().getJobName(), "N/A");
+      assertEquals(client.getMapTaskReports(jobId).length, 0);
+
+      try {
+        client.getSetupTaskReports(jobId);
+      } catch (YarnException e) {
+        assertEquals(e.getMessage(), "Unrecognized task type: JOB_SETUP");
+      }
+      try {
+        client.getCleanupTaskReports(jobId);
+      } catch (YarnException e) {
+        assertEquals(e.getMessage(), "Unrecognized task type: JOB_CLEANUP");
+      }
+      assertEquals(client.getReduceTaskReports(jobId).length, 0);
+      // test ClusterStatus
+      ClusterStatus status = client.getClusterStatus(true);
+      assertEquals(status.getActiveTrackerNames().size(), 2);
+      // it method does not implemented and always return empty array or null;
+      assertEquals(status.getBlacklistedTrackers(), 0);
+      assertEquals(status.getBlacklistedTrackerNames().size(), 0);
+      assertEquals(status.getBlackListedTrackersInfo().size(), 0);
+      assertEquals(status.getJobTrackerStatus(), JobTrackerStatus.RUNNING);
+      assertEquals(status.getMapTasks(), 1);
+      assertEquals(status.getMaxMapTasks(), 20);
+      assertEquals(status.getMaxReduceTasks(), 4);
+      assertEquals(status.getNumExcludedNodes(), 0);
+      assertEquals(status.getReduceTasks(), 1);
+      assertEquals(status.getTaskTrackers(), 2);
+      assertEquals(status.getTTExpiryInterval(), 0);
+      assertEquals(status.getJobTrackerStatus(), JobTrackerStatus.RUNNING);
+
+      // test read and write
+      ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
+      status.write(new DataOutputStream(dataOut));
+      ClusterStatus status2 = new ClusterStatus();
+
+      status2.readFields(new DataInputStream(new ByteArrayInputStream(dataOut
+          .toByteArray())));
+      assertEquals(status.getActiveTrackerNames(),
+          status2.getActiveTrackerNames());
+      assertEquals(status.getBlackListedTrackersInfo(),
+          status2.getBlackListedTrackersInfo());
+      assertEquals(status.getMapTasks(), status2.getMapTasks());
+
+      try {
+      } catch (RuntimeException e) {
+        assertTrue(e.getMessage().endsWith("not found on CLASSPATH"));
+      }
+
+      // test taskStatusfilter
+      JobClient.setTaskOutputFilter(job, TaskStatusFilter.ALL);
+      assertEquals(JobClient.getTaskOutputFilter(job), TaskStatusFilter.ALL);
+
+      // runningJob.setJobPriority(JobPriority.HIGH.name());
+
+      // test default map
+      assertEquals(client.getDefaultMaps(), 20);
+      assertEquals(client.getDefaultReduces(), 4);
+      assertEquals(client.getSystemDir().getName(), "jobSubmitDir");
+      // test queue information
+      JobQueueInfo[] rootQueueInfo = client.getRootQueues();
+      assertEquals(rootQueueInfo.length, 1);
+      assertEquals(rootQueueInfo[0].getQueueName(), "default");
+      JobQueueInfo[] qinfo = client.getQueues();
+      assertEquals(qinfo.length, 1);
+      assertEquals(qinfo[0].getQueueName(), "default");
+      assertEquals(client.getChildQueues("default").length, 0);
+      assertEquals(client.getJobsFromQueue("default").length, 1);
+      assertTrue(client.getJobsFromQueue("default")[0].getJobFile().endsWith(
+          "/job.xml"));
+
+      JobQueueInfo qi = client.getQueueInfo("default");
+      assertEquals(qi.getQueueName(), "default");
+      assertEquals(qi.getQueueState(), "running");
+
+      QueueAclsInfo[] aai = client.getQueueAclsForCurrentUser();
+      assertEquals(aai.length, 2);
+      assertEquals(aai[0].getQueueName(), "root");
+      assertEquals(aai[1].getQueueName(), "default");
+      // test token
+      Token<DelegationTokenIdentifier> token = client
+          .getDelegationToken(new Text(UserGroupInformation.getCurrentUser()
+              .getShortUserName()));
+      assertEquals(token.getKind().toString(), "RM_DELEGATION_TOKEN");
+
+      // test JobClient
+
+
+      // The following asserts read JobStatus twice and ensure the returned
+      // JobStatus objects correspond to the same Job.
+ assertEquals("Expected matching JobIDs", jobId, client.getJob(jobId) + .getJobStatus().getJobID()); + assertEquals("Expected matching startTimes", rj.getJobStatus() + .getStartTime(), client.getJob(jobId).getJobStatus().getStartTime()); + } finally { + if (fileSys != null) { + fileSys.delete(testDir, true); + } + if (mr != null) { + mr.stop(); + } + } + } + + /** + * test BlackListInfo class + * + * @throws IOException + */ + @Test (timeout=5000) + public void testBlackListInfo() throws IOException { + BlackListInfo info = new BlackListInfo(); + info.setBlackListReport("blackListInfo"); + info.setReasonForBlackListing("reasonForBlackListing"); + info.setTrackerName("trackerName"); + ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); + DataOutput out = new DataOutputStream(byteOut); + info.write(out); + BlackListInfo info2 = new BlackListInfo(); + info2.readFields(new DataInputStream(new ByteArrayInputStream(byteOut + .toByteArray()))); + assertEquals(info, info); + assertEquals(info.toString(), info.toString()); + assertEquals(info.getTrackerName(), "trackerName"); + assertEquals(info.getReasonForBlackListing(), "reasonForBlackListing"); + assertEquals(info.getBlackListReport(), "blackListInfo"); + + } +/** + * test run from command line JobQueueClient + * @throws Exception + */ + @Test (timeout=500000) + public void testJobQueueClient() throws Exception { + MiniMRClientCluster mr = null; + FileSystem fileSys = null; + PrintStream oldOut = System.out; + try { + Configuration conf = new Configuration(); + mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf); + + JobConf job = new JobConf(mr.getConfig()); + + fileSys = FileSystem.get(job); + fileSys.delete(testDir, true); + FSDataOutputStream out = fileSys.create(inFile, true); + out.writeBytes("This is a test file"); + out.close(); + + FileInputFormat.setInputPaths(job, inFile); + FileOutputFormat.setOutputPath(job, outDir); + + job.setInputFormat(TextInputFormat.class); + job.setOutputFormat(TextOutputFormat.class); + + job.setMapperClass(IdentityMapper.class); + job.setReducerClass(IdentityReducer.class); + job.setNumReduceTasks(0); + + JobClient client = new JobClient(mr.getConfig()); + + client.submitJob(job); + + JobQueueClient jobClient = new JobQueueClient(job); + + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + System.setOut(new PrintStream(bytes)); + String[] arg = { "-list" }; + jobClient.run(arg); + assertTrue(bytes.toString().contains("Queue Name : default")); + assertTrue(bytes.toString().contains("Queue State : running")); + bytes = new ByteArrayOutputStream(); + System.setOut(new PrintStream(bytes)); + String[] arg1 = { "-showacls" }; + jobClient.run(arg1); + assertTrue(bytes.toString().contains("Queue acls for user :")); + assertTrue(bytes.toString().contains( + "root ADMINISTER_QUEUE,SUBMIT_APPLICATIONS")); + assertTrue(bytes.toString().contains( + "default ADMINISTER_QUEUE,SUBMIT_APPLICATIONS")); + + // test for info and default queue + + bytes = new ByteArrayOutputStream(); + System.setOut(new PrintStream(bytes)); + String[] arg2 = { "-info", "default" }; + jobClient.run(arg2); + assertTrue(bytes.toString().contains("Queue Name : default")); + assertTrue(bytes.toString().contains("Queue State : running")); + assertTrue(bytes.toString().contains("Scheduling Info")); + + // test for info , default queue and jobs + bytes = new ByteArrayOutputStream(); + System.setOut(new PrintStream(bytes)); + String[] arg3 = { "-info", "default", "-showJobs" }; + jobClient.run(arg3); + 
assertTrue(bytes.toString().contains("Queue Name : default")); + assertTrue(bytes.toString().contains("Queue State : running")); + assertTrue(bytes.toString().contains("Scheduling Info")); + assertTrue(bytes.toString().contains("job_1")); + + String[] arg4 = {}; + jobClient.run(arg4); + + + } finally { + System.setOut(oldOut); + if (fileSys != null) { + fileSys.delete(testDir, true); + } + if (mr != null) { + mr.stop(); + } + } + } } Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java?rev=1461149&r1=1461148&r2=1461149&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java (original) +++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java Tue Mar 26 14:17:59 2013 @@ -17,6 +17,8 @@ */ package org.apache.hadoop.mapred; +import java.util.Map; + import junit.framework.TestCase; import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow; @@ -24,6 +26,7 @@ import org.apache.hadoop.mapred.Statisti public class TestStatisticsCollector extends TestCase{ + @SuppressWarnings("rawtypes") public void testMovingWindow() throws Exception { StatisticsCollector collector = new StatisticsCollector(1); TimeWindow window = new TimeWindow("test", 6, 2); @@ -78,6 +81,28 @@ public class TestStatisticsCollector ext collector.update(); assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue()); assertEquals(95, stat.getValues().get(sincStart).getValue()); + + // test Stat class + Map updaters= collector.getUpdaters(); + assertEquals(updaters.size(),2); + Map<String, Stat> ststistics=collector.getStatistics(); + assertNotNull(ststistics.get("m1")); + + Stat newStat= collector.createStat("m2"); + assertEquals(newStat.name, "m2"); + Stat st=collector.removeStat("m1"); + assertEquals(st.name, "m1"); + assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue()); + assertEquals(95, stat.getValues().get(sincStart).getValue()); + st=collector.removeStat("m1"); + // try to remove stat again + assertNull(st); + collector.start(); + // waiting 2,5 sec + Thread.sleep(2500); + assertEquals(69, stat.getValues().get(window).getValue()); + assertEquals(95, stat.getValues().get(sincStart).getValue()); + } } Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java?rev=1461149&r1=1461148&r2=1461149&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java 
(original) +++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java Tue Mar 26 14:17:59 2013 @@ -61,11 +61,12 @@ public class TestTextInputFormat { throw new RuntimeException("init failure", e); } } + @SuppressWarnings("deprecation") private static Path workDir = new Path(new Path(System.getProperty("test.build.data", "/tmp")), "TestTextInputFormat").makeQualified(localFs); - @Test + @Test (timeout=500000) public void testFormat() throws Exception { JobConf job = new JobConf(defaultConf); Path file = new Path(workDir, "test.txt"); @@ -145,7 +146,7 @@ public class TestTextInputFormat { } } - @Test + @Test (timeout=900000) public void testSplitableCodecs() throws IOException { JobConf conf = new JobConf(defaultConf); int seed = new Random().nextInt(); @@ -250,7 +251,7 @@ public class TestTextInputFormat { bufsz); } - @Test + @Test (timeout=5000) public void testUTF8() throws Exception { LineReader in = makeStream("abcd\u20acbdcd\u20ac"); Text line = new Text(); @@ -269,7 +270,7 @@ public class TestTextInputFormat { * * @throws Exception */ - @Test + @Test (timeout=5000) public void testNewLines() throws Exception { final String STR = "a\nbb\n\nccc\rdddd\r\r\r\n\r\neeeee"; final int STRLENBYTES = STR.getBytes().length; @@ -309,7 +310,7 @@ public class TestTextInputFormat { * * @throws Exception */ - @Test + @Test (timeout=5000) public void testMaxLineLength() throws Exception { final String STR = "a\nbb\n\nccc\rdddd\r\neeeee"; final int STRLENBYTES = STR.getBytes().length; @@ -334,7 +335,7 @@ public class TestTextInputFormat { } } - @Test + @Test (timeout=5000) public void testMRMaxLine() throws Exception { final int MAXPOS = 1024 * 1024; final int MAXLINE = 10 * 1024; @@ -354,6 +355,9 @@ public class TestTextInputFormat { position += b.length; return b.length; } + public void reset() { + position=0; + } }; final LongWritable key = new LongWritable(); final Text val = new Text(); @@ -362,8 +366,14 @@ public class TestTextInputFormat { conf.setInt(org.apache.hadoop.mapreduce.lib.input. 
                 LineRecordReader.MAX_LINE_LENGTH, MAXLINE);
     conf.setInt("io.file.buffer.size", BUF); // used by LRR
-    final LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
+    // test another constructor
+    LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
+    assertFalse("Read a line from null", lrr.next(key, val));
+    infNull.reset();
+    lrr = new LineRecordReader(infNull, 0L, MAXLINE, MAXPOS);
     assertFalse("Read a line from null", lrr.next(key, val));
+
+
   }
 
   private static void writeFile(FileSystem fs, Path name,
@@ -400,7 +410,7 @@ public class TestTextInputFormat {
   /**
    * Test using the gzip codec for reading
    */
-  @Test
+  @Test (timeout=5000)
   public void testGzip() throws IOException {
     JobConf job = new JobConf(defaultConf);
     CompressionCodec gzip = new GzipCodec();
@@ -434,7 +444,7 @@ public class TestTextInputFormat {
   /**
    * Test using the gzip codec and an empty input file
   */
-  @Test
+  @Test (timeout=5000)
   public void testGzipEmpty() throws IOException {
     JobConf job = new JobConf(defaultConf);
     CompressionCodec gzip = new GzipCodec();

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java?rev=1461149&r1=1461148&r2=1461149&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java Tue Mar 26 14:17:59 2013
@@ -44,7 +44,6 @@ public class TestTextOutputFormat extend
                                       "data"),
                            FileOutputCommitter.TEMP_DIR_NAME), "_" + attempt);
 
-  @SuppressWarnings("unchecked")
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -59,8 +58,8 @@ public class TestTextOutputFormat extend
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
 
-    TextOutputFormat theOutputFormat = new TextOutputFormat();
-    RecordWriter theRecordWriter =
+    TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+    RecordWriter<Object,Object> theRecordWriter =
       theOutputFormat.getRecordWriter(localFs, job, file, reporter);
 
     Text key1 = new Text("key1");
@@ -95,7 +94,6 @@ public class TestTextOutputFormat extend
 
   }
 
-  @SuppressWarnings("unchecked")
   public void testFormatWithCustomSeparator() throws Exception {
     JobConf job = new JobConf();
     String separator = "\u0001";
@@ -112,8 +110,8 @@ public class TestTextOutputFormat extend
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
 
-    TextOutputFormat theOutputFormat = new TextOutputFormat();
-    RecordWriter theRecordWriter =
+    TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+    RecordWriter<Object,Object> theRecordWriter =
      theOutputFormat.getRecordWriter(localFs, job, file, reporter);
 
     Text key1 = new Text("key1");
@@ -147,7 +145,61 @@ public class TestTextOutputFormat extend
     assertEquals(output, expectedOutput.toString());
   }
 
-
+  /**
+   * test compressed file
+   * @throws IOException
+   */
+  public void testCompress() throws IOException{
+    JobConf job = new JobConf();
+    String separator = "\u0001";
+    job.set("mapreduce.output.textoutputformat.separator", separator);
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
+    job.set(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS,"true");
+
+    FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
+    FileOutputFormat.setWorkOutputPath(job, workDir);
+    FileSystem fs = workDir.getFileSystem(job);
+    if (!fs.mkdirs(workDir)) {
+      fail("Failed to create output directory");
+    }
+    String file = "test.txt";
+
+    // A reporter that does nothing
+    Reporter reporter = Reporter.NULL;
+
+    TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+    RecordWriter<Object,Object> theRecordWriter =
+      theOutputFormat.getRecordWriter(localFs, job, file, reporter);
+    Text key1 = new Text("key1");
+    Text key2 = new Text("key2");
+    Text val1 = new Text("val1");
+    Text val2 = new Text("val2");
+    NullWritable nullWritable = NullWritable.get();
+
+    try {
+      theRecordWriter.write(key1, val1);
+      theRecordWriter.write(null, nullWritable);
+      theRecordWriter.write(null, val1);
+      theRecordWriter.write(nullWritable, val2);
+      theRecordWriter.write(key2, nullWritable);
+      theRecordWriter.write(key1, null);
+      theRecordWriter.write(null, null);
+      theRecordWriter.write(key2, val2);
+
+    } finally {
+      theRecordWriter.close(reporter);
+    }
+    File expectedFile = new File(new Path(workDir, file).toString());
+    StringBuffer expectedOutput = new StringBuffer();
+    expectedOutput.append(key1).append(separator).append(val1).append("\n");
+    expectedOutput.append(val1).append("\n");
+    expectedOutput.append(val2).append("\n");
+    expectedOutput.append(key2).append("\n");
+    expectedOutput.append(key1).append("\n");
+    expectedOutput.append(key2).append(separator).append(val2).append("\n");
+    String output = UtilsForTests.slurp(expectedFile);
+    assertEquals(output, expectedOutput.toString());
+  }
 
   public static void main(String[] args) throws Exception {
     new TestTextOutputFormat().testFormat();
   }