Author: bobby
Date: Wed Mar 20 16:11:58 2013
New Revision: 1458915

URL: http://svn.apache.org/r1458915
Log:
svn merge -c 1458906 FIXES: MAPREDUCE-4972. Coverage fixing for org.apache.hadoop.mapreduce.jobhistory (Aleksey Gorshkov via bobby)
Added:
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
      - copied unchanged from r1458906, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
Modified:
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt?rev=1458915&r1=1458914&r2=1458915&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt Wed Mar 20 16:11:58 2013
@@ -612,6 +612,9 @@ Release 0.23.7 - UNRELEASED
     MAPREDUCE-5027. Shuffle does not limit number of outstanding connections
     (Robert Parker via jeagles)
 
+    MAPREDUCE-4972. Coverage fixing for org.apache.hadoop.mapreduce.jobhistory
+    (Aleksey Gorshkov via bobby)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4946. Fix a performance problem for large jobs by reducing the

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java?rev=1458915&r1=1458914&r2=1458915&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java Wed Mar 20 16:11:58 2013
@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TaskType;
@@ -53,7 +54,7 @@ public class TestJobHistoryEventHandler
   private static final Log LOG = LogFactory
       .getLog(TestJobHistoryEventHandler.class);
 
-  @Test
+  @Test (timeout=50000)
   public void testFirstFlushOnCompletionEvent() throws Exception {
     TestParams t = new TestParams();
     Configuration conf = new Configuration();
@@ -96,7 +97,7 @@ public class TestJobHistoryEventHandler
     }
   }
 
-  @Test
+  @Test (timeout=50000)
   public void testMaxUnflushedCompletionEvents() throws Exception {
     TestParams t = new TestParams();
     Configuration conf = new Configuration();
@@ -131,17 +132,17 @@ public class TestJobHistoryEventHandler
       handleNextNEvents(jheh, 1);
       verify(mockWriter).flush();
-      
+
       handleNextNEvents(jheh, 50);
       verify(mockWriter, times(6)).flush();
-      
+
     } finally {
       jheh.stop();
       verify(mockWriter).close();
     }
   }
-  
-  @Test
+
+  @Test (timeout=50000)
   public void testUnflushedTimer() throws Exception {
     TestParams t = new TestParams();
     Configuration conf = new Configuration();
@@ -181,8 +182,8 @@ public class TestJobHistoryEventHandler
       verify(mockWriter).close();
     }
   }
-  
-  @Test
+
+  @Test (timeout=50000)
   public void testBatchedFlushJobEndMultiplier() throws Exception {
     TestParams t = new TestParams();
     Configuration conf = new Configuration();
@@ -265,7 +266,7 @@ public class TestJobHistoryEventHandler
     when(mockContext.getApplicationID()).thenReturn(appId);
     return mockContext;
   }
-  
+
   private class TestParams {
     String workDir = setupTestWorkDir();
@@ -279,12 +280,8 @@ public class TestJobHistoryEventHandler
   }
 
   private JobHistoryEvent getEventToEnqueue(JobId jobId) {
-    JobHistoryEvent toReturn = Mockito.mock(JobHistoryEvent.class);
-    HistoryEvent he = Mockito.mock(HistoryEvent.class);
-    Mockito.when(he.getEventType()).thenReturn(EventType.JOB_STATUS_CHANGED);
-    Mockito.when(toReturn.getHistoryEvent()).thenReturn(he);
-    Mockito.when(toReturn.getJobID()).thenReturn(jobId);
-    return toReturn;
+    HistoryEvent toReturn = new JobStatusChangedEvent(new JobID(Integer.toString(jobId.getId()), jobId.getId()), "change status");
+    return new JobHistoryEvent(jobId, toReturn);
   }
 
   @Test
@@ -344,8 +341,6 @@ public class TestJobHistoryEventHandler
 class JHEvenHandlerForTest extends JobHistoryEventHandler {
 
   private EventWriter eventWriter;
-  volatile int handleEventCompleteCalls = 0;
-  volatile int handleEventStartedCalls = 0;
 
   public JHEvenHandlerForTest(AppContext context, int startCount) {
     super(context, startCount);
@@ -354,7 +349,7 @@ class JHEvenHandlerForTest extends JobHi
   @Override
   public void start() {
   }
-  
+
   @Override
   protected EventWriter createEventWriter(Path historyFilePath)
       throws IOException {
@@ -365,7 +360,7 @@ class JHEvenHandlerForTest extends JobHi
   @Override
   protected void closeEventWriter(JobId jobId) {
   }
-  
+
   public EventWriter getEventWriter() {
     return this.eventWriter;
   }
@@ -375,13 +370,12 @@ class JHEvenHandlerForTest extends JobHi
  * Class to help with testSigTermedFunctionality
  */
 class JHEventHandlerForSigtermTest extends JobHistoryEventHandler {
-  private MetaInfo metaInfo;
 
   public JHEventHandlerForSigtermTest(AppContext context, int startCount) {
     super(context, startCount);
   }
 
   public void addToFileMap(JobId jobId) {
-    metaInfo = Mockito.mock(MetaInfo.class);
+    MetaInfo metaInfo = Mockito.mock(MetaInfo.class);
     Mockito.when(metaInfo.isWriterActive()).thenReturn(true);
     fileMap.put(jobId, metaInfo);
   }
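For context on the getEventToEnqueue() change above: the mocked JobHistoryEvent/HistoryEvent pair is replaced by a concrete JobStatusChangedEvent wrapped in a JobHistoryEvent, so the handler is driven by a real event object. A minimal sketch of that pattern, using only types that appear in this diff (the helper class name is hypothetical, not part of the patch):

    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
    import org.apache.hadoop.mapreduce.jobhistory.JobStatusChangedEvent;
    import org.apache.hadoop.mapreduce.v2.api.records.JobId;

    // Hypothetical helper: builds the event the same way the new
    // getEventToEnqueue() does, instead of stubbing it with Mockito.
    class JobHistoryEventFixture {
      static JobHistoryEvent statusChangeEvent(JobId jobId) {
        // Convert the MRv2 JobId into the old-API JobID the event expects.
        JobID oldApiJobId = new JobID(Integer.toString(jobId.getId()), jobId.getId());
        HistoryEvent event = new JobStatusChangedEvent(oldApiJobId, "change status");
        return new JobHistoryEvent(jobId, event);
      }
    }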
Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java?rev=1458915&r1=1458914&r2=1458915&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java Wed Mar 20 16:11:58 2013
@@ -79,7 +79,7 @@ public class TestJobHistoryEntities {
   }
 
   /* Verify some expected values based on the history file */
-  @Test
+  @Test (timeout=10000)
   public void testCompletedJob() throws Exception {
     HistoryFileInfo info = mock(HistoryFileInfo.class);
     when(info.getConfFile()).thenReturn(fullConfPath);
@@ -104,7 +104,7 @@ public class TestJobHistoryEntities {
     assertEquals(JobState.SUCCEEDED, jobReport.getJobState());
   }
 
-  @Test
+  @Test (timeout=10000)
   public void testCompletedTask() throws Exception {
     HistoryFileInfo info = mock(HistoryFileInfo.class);
     when(info.getConfFile()).thenReturn(fullConfPath);
@@ -133,7 +133,7 @@ public class TestJobHistoryEntities {
     assertEquals(rt1Id, rt1Report.getTaskId());
   }
 
-  @Test
+  @Test (timeout=10000)
   public void testCompletedTaskAttempt() throws Exception {
     HistoryFileInfo info = mock(HistoryFileInfo.class);
     when(info.getConfFile()).thenReturn(fullConfPath);
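The TestJobHistoryEntities changes are limited to timeout guards, a pattern applied across every test touched by this commit. A small self-contained illustration, assuming JUnit 4 (class name, timeout value, and body are illustrative only):

    import org.junit.Test;

    public class TimeoutGuardExample {
      // JUnit 4 fails the test if it runs longer than the given number of
      // milliseconds, so a hung history lookup cannot stall the whole build.
      @Test(timeout = 10000)
      public void completesWithinTenSeconds() throws Exception {
        Thread.sleep(100); // stand-in for the real history-file assertions
      }
    }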
Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java?rev=1458915&r1=1458914&r2=1458915&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java Wed Mar 20 16:11:58 2013
@@ -25,7 +25,6 @@ import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
@@ -67,8 +66,17 @@ public class TestJobHistoryEvents {
      * completed maps 
      */
     HistoryContext context = new JobHistory();
+    // test start and stop states
     ((JobHistory)context).init(conf);
-    Job parsedJob = context.getJob(jobId);
+    ((JobHistory)context).start();
+    Assert.assertTrue( context.getStartTime()>0);
+    Assert.assertEquals(((JobHistory)context).getServiceState(),Service.STATE.STARTED);
+    
+    
+    ((JobHistory)context).stop();
+    Assert.assertEquals(((JobHistory)context).getServiceState(),Service.STATE.STOPPED);
+    Job parsedJob = context.getJob(jobId);
+    
     Assert.assertEquals("CompletedMaps not correct", 2,
         parsedJob.getCompletedMaps());
     Assert.assertEquals(System.getProperty("user.name"), parsedJob.getUserName());
@@ -177,9 +185,8 @@ public class TestJobHistoryEvents {
     @Override
     protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
         AppContext context) {
-      JobHistoryEventHandler eventHandler = new JobHistoryEventHandler(
-          context, getStartCount());
-      return eventHandler;
+      return new JobHistoryEventHandler(
+          context, getStartCount());
     }
   }
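The new assertions in TestJobHistoryEvents walk the JobHistory service through its lifecycle before reading the job back. A standalone sketch of that init/start/stop pattern, assuming the org.apache.hadoop.yarn.service.Service API used on branch-2 at this revision (test class name is illustrative; the real test also configures history directories in conf before init):

    import junit.framework.Assert;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
    import org.apache.hadoop.yarn.service.Service;
    import org.junit.Test;

    public class JobHistoryLifecycleSketch {
      // Drive JobHistory through init -> start -> stop and check the reported
      // service state at each step, mirroring the assertions added above.
      @Test(timeout = 50000)
      public void startAndStopJobHistory() throws Exception {
        Configuration conf = new Configuration(); // the real test points this at a history dir
        JobHistory jobHistory = new JobHistory();
        jobHistory.init(conf);
        jobHistory.start();
        Assert.assertTrue(jobHistory.getStartTime() > 0);
        Assert.assertEquals(Service.STATE.STARTED, jobHistory.getServiceState());

        jobHistory.stop();
        Assert.assertEquals(Service.STATE.STOPPED, jobHistory.getServiceState());
      }
    }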
Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java?rev=1458915&r1=1458914&r2=1458915&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java Wed Mar 20 16:11:58 2013
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.mapreduce.v2.hs;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.PrintStream;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -40,6 +42,7 @@ import org.apache.hadoop.mapreduce.TaskI
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.EventReader;
 import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
+import org.apache.hadoop.mapreduce.jobhistory.HistoryViewer;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
@@ -60,7 +63,6 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
 import org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory;
 import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
 import org.apache.hadoop.net.DNSToSwitchMapping;
@@ -78,6 +80,8 @@ public class TestJobHistoryParsing {
 
   private static final String RACK_NAME = "/MyRackName";
 
+  private ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+
   public static class MyResolver implements DNSToSwitchMapping {
     @Override
     public List<String> resolve(List<String> names) {
@@ -89,14 +93,14 @@ public class TestJobHistoryParsing {
     }
   }
 
-  @Test
+  @Test (timeout=50000)
   public void testJobInfo() throws Exception {
     JobInfo info = new JobInfo();
     Assert.assertEquals("NORMAL", info.getPriority());
     info.printAll();
   }
 
-  @Test
+  @Test (timeout=50000)
   public void testHistoryParsing() throws Exception {
     LOG.info("STARTING testHistoryParsing()");
     try {
@@ -106,7 +110,7 @@ public class TestJobHistoryParsing {
     }
   }
 
-  @Test
+  @Test (timeout=50000)
   public void testHistoryParsingWithParseErrors() throws Exception {
     LOG.info("STARTING testHistoryParsingWithParseErrors()");
     try {
@@ -321,18 +325,37 @@ public class TestJobHistoryParsing {
         }
       }
     }
+
+    // test output for HistoryViewer
+    PrintStream stdps=System.out;
+    try {
+      System.setOut(new PrintStream(outContent));
+      HistoryViewer viewer = new HistoryViewer(fc.makeQualified(
+          fileInfo.getHistoryFile()).toString(), conf, true);
+      viewer.print();
+
+      for (TaskInfo taskInfo : allTasks.values()) {
+
+        String test= (taskInfo.getTaskStatus()==null?"":taskInfo.getTaskStatus())+" "+taskInfo.getTaskType()+" task list for "+taskInfo.getTaskId().getJobID();
+        Assert.assertTrue(outContent.toString().indexOf(test)>0);
+        Assert.assertTrue(outContent.toString().indexOf(taskInfo.getTaskId().toString())>0);
+      }
+    } finally {
+      System.setOut(stdps);
+
+    }
   }
-  
+
   // Computes finished maps similar to RecoveryService...
-  private long computeFinishedMaps(JobInfo jobInfo,
-      int numMaps, int numSuccessfulMaps) {
+  private long computeFinishedMaps(JobInfo jobInfo, int numMaps,
+      int numSuccessfulMaps) {
     if (numMaps == numSuccessfulMaps) {
       return jobInfo.getFinishedMaps();
     }
-    
+
     long numFinishedMaps = 0;
-    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos =
-        jobInfo.getAllTasks();
+    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo
+        .getAllTasks();
     for (TaskInfo taskInfo : taskInfos.values()) {
       if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
         ++numFinishedMaps;
@@ -341,7 +364,7 @@ public class TestJobHistoryParsing {
     return numFinishedMaps;
   }
 
-  @Test
+  @Test (timeout=50000)
   public void testHistoryParsingForFailedAttempts() throws Exception {
     LOG.info("STARTING testHistoryParsingForFailedAttempts");
     try {
@@ -468,7 +491,7 @@ public class TestJobHistoryParsing {
     }
   }
 
-  @Test
+  @Test (timeout=50000)
   public void testScanningOldDirs() throws Exception {
     LOG.info("STARTING testScanningOldDirs");
     try {
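The HistoryViewer check added to TestJobHistoryParsing relies on redirecting System.out into a buffer, restoring it in a finally block, and asserting on the captured text. A minimal, self-contained sketch of that technique, assuming JUnit 4 (the printed string here is a stand-in for viewer.print()):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    import org.junit.Assert;
    import org.junit.Test;

    public class StdoutCaptureSketch {
      // Redirect System.out into a buffer, run the code that prints, restore
      // the original stream, then assert on what was written.
      @Test
      public void capturesPrintedOutput() {
        ByteArrayOutputStream outContent = new ByteArrayOutputStream();
        PrintStream stdout = System.out;
        try {
          System.setOut(new PrintStream(outContent));
          System.out.println("SUCCEEDED MAP task list for job_1363792011234_0001");
        } finally {
          System.setOut(stdout);
        }
        Assert.assertTrue(outContent.toString().contains("task list for"));
      }
    }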