Author: daryn
Date: Tue Oct 23 20:57:36 2012
New Revision: 1401467

URL: http://svn.apache.org/viewvc?rev=1401467&view=rev
Log:
MAPREDUCE-4229. Intern counter names in the JT (bobby via daryn)
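For context, a minimal illustrative sketch (not part of this commit) of what org.apache.hadoop.util.StringInterner.weakIntern buys here: parsing job history events and counter strings produces many duplicate String objects for the same counter, group, host, and queue names, and weak interning collapses each distinct value into one canonical instance. The class name InternSketch and the sample counter name below are made up for illustration.

    // Illustrative only: how weak interning deduplicates equal strings.
    import org.apache.hadoop.util.StringInterner;

    public class InternSketch {
      public static void main(String[] args) {
        // Two equal but distinct String instances, as a parser would produce.
        String a = new String("FILE_BYTES_READ");
        String b = new String("FILE_BYTES_READ");
        System.out.println(a == b);   // false: two separate copies on the heap

        // weakIntern returns a canonical copy; repeated names share one String.
        String ia = StringInterner.weakIntern(a);
        String ib = StringInterner.weakIntern(b);
        System.out.println(ia == ib); // true: one shared instance
      }
    }

The interner is backed by weak references, so canonical names can still be garbage collected once nothing refers to them.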
Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1401467&r1=1401466&r2=1401467&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Tue Oct 23 20:57:36 2012
@@ -602,6 +602,8 @@ Release 0.23.5 - UNRELEASED
     MAPREDUCE-4740. only .jars can be added to the Distributed Cache
     classpath. (Robert Joseph Evans via jlowe)
 
+    MAPREDUCE-4229. Intern counter names in the JT (bobby via daryn)
+
 Release 0.23.4 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java?rev=1401467&r1=1401466&r2=1401467&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java Tue Oct 23 20:57:36 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.util.StringInterner;
 
 import org.apache.avro.Schema;
 import org.apache.avro.io.Decoder;
@@ -170,9 +171,11 @@ public class EventReader implements Clos
     Counters result = new Counters();
     for (JhCounterGroup g : counters.groups) {
       CounterGroup group =
-          result.addGroup(g.name.toString(), g.displayName.toString());
+          result.addGroup(StringInterner.weakIntern(g.name.toString()),
+              StringInterner.weakIntern(g.displayName.toString()));
       for (JhCounter c : g.counts) {
-        group.addCounter(c.name.toString(), c.displayName.toString(), c.value);
+        group.addCounter(StringInterner.weakIntern(c.name.toString()),
+            StringInterner.weakIntern(c.displayName.toString()), c.value);
       }
     }
     return result;

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java?rev=1401467&r1=1401466&r2=1401467&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java Tue Oct 23 20:57:36 2012
@@ -42,6 +42,7 @@ import org.apache.hadoop.mapreduce.TaskI
 import org.apache.hadoop.mapred.TaskStatus;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.StringInterner;
 
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -226,10 +227,10 @@ public class JobHistoryParser {
     TaskAttemptInfo attemptInfo =
       taskInfo.attemptsMap.get(event.getAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.state = event.getState();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.state = StringInterner.weakIntern(event.getState());
     attemptInfo.counters = event.getCounters();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
   }
 
   private void handleReduceAttemptFinishedEvent
@@ -238,14 +239,14 @@ public class JobHistoryParser {
     TaskAttemptInfo attemptInfo =
       taskInfo.attemptsMap.get(event.getAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.state = event.getState();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.state = StringInterner.weakIntern(event.getState());
     attemptInfo.shuffleFinishTime = event.getShuffleFinishTime();
     attemptInfo.sortFinishTime = event.getSortFinishTime();
     attemptInfo.counters = event.getCounters();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
     attemptInfo.port = event.getPort();
-    attemptInfo.rackname = event.getRackName();
+    attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
   }
 
   private void handleMapAttemptFinishedEvent(MapAttemptFinishedEvent event) {
@@ -253,13 +254,13 @@ public class JobHistoryParser {
     TaskAttemptInfo attemptInfo =
       taskInfo.attemptsMap.get(event.getAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.state = event.getState();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.state = StringInterner.weakIntern(event.getState());
     attemptInfo.mapFinishTime = event.getMapFinishTime();
     attemptInfo.counters = event.getCounters();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
     attemptInfo.port = event.getPort();
-    attemptInfo.rackname = event.getRackName();
+    attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
   }
 
   private void handleTaskAttemptFailedEvent(
@@ -269,10 +270,10 @@ public class JobHistoryParser {
       taskInfo.attemptsMap.get(event.getTaskAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
     attemptInfo.error = event.getError();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
     attemptInfo.port = event.getPort();
-    attemptInfo.rackname = event.getRackName();
+    attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
     attemptInfo.shuffleFinishTime = event.getFinishTime();
     attemptInfo.sortFinishTime = event.getFinishTime();
     attemptInfo.mapFinishTime = event.getFinishTime();
@@ -300,7 +301,7 @@ public class JobHistoryParser {
     attemptInfo.startTime = event.getStartTime();
     attemptInfo.attemptId = event.getTaskAttemptId();
     attemptInfo.httpPort = event.getHttpPort();
-    attemptInfo.trackerName = event.getTrackerName();
+    attemptInfo.trackerName = StringInterner.weakIntern(event.getTrackerName());
     attemptInfo.taskType = event.getTaskType();
     attemptInfo.shufflePort = event.getShufflePort();
     attemptInfo.containerId = event.getContainerId();
@@ -344,7 +345,7 @@ public class JobHistoryParser {
     info.finishTime = event.getFinishTime();
     info.finishedMaps = event.getFinishedMaps();
     info.finishedReduces = event.getFinishedReduces();
-    info.jobStatus = event.getStatus();
+    info.jobStatus = StringInterner.weakIntern(event.getStatus());
   }
 
   private void handleJobFinishedEvent(JobFinishedEvent event) {
@@ -375,7 +376,7 @@ public class JobHistoryParser {
     amInfo.appAttemptId = event.getAppAttemptId();
     amInfo.startTime = event.getStartTime();
     amInfo.containerId = event.getContainerId();
-    amInfo.nodeManagerHost = event.getNodeManagerHost();
+    amInfo.nodeManagerHost = StringInterner.weakIntern(event.getNodeManagerHost());
     amInfo.nodeManagerPort = event.getNodeManagerPort();
     amInfo.nodeManagerHttpPort = event.getNodeManagerHttpPort();
     if (info.amInfos == null) {
@@ -393,11 +394,11 @@ public class JobHistoryParser {
   private void handleJobSubmittedEvent(JobSubmittedEvent event) {
     info.jobid = event.getJobId();
     info.jobname = event.getJobName();
-    info.username = event.getUserName();
+    info.username = StringInterner.weakIntern(event.getUserName());
     info.submitTime = event.getSubmitTime();
     info.jobConfPath = event.getJobConfPath();
     info.jobACLs = event.getJobAcls();
-    info.jobQueueName = event.getJobQueueName();
+    info.jobQueueName = StringInterner.weakIntern(event.getJobQueueName());
   }
 
   /**

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java?rev=1401467&r1=1401466&r2=1401467&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java Tue Oct 23 20:57:36 2012
@@ -28,6 +28,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.mapreduce.counters.AbstractCounters;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.counters.CounterGroupBase;
+import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -235,13 +236,13 @@ public class CountersStrings {
 
       // Get the actual name
       String groupName =
-          getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex);
-      groupName = unescape(groupName);
+          StringInterner.weakIntern(getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex));
+      groupName = StringInterner.weakIntern(unescape(groupName));
 
       // Get the display name
       String groupDisplayName =
-          getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex);
-      groupDisplayName = unescape(groupDisplayName);
+          StringInterner.weakIntern(getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex));
+      groupDisplayName = StringInterner.weakIntern(unescape(groupDisplayName));
 
       // Get the counters
       G group = counters.getGroup(groupName);
@@ -255,13 +256,13 @@ public class CountersStrings {
 
        // Get the actual name
        String counterName =
-           getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex);
-       counterName = unescape(counterName);
+           StringInterner.weakIntern(getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex));
+       counterName = StringInterner.weakIntern(unescape(counterName));
 
        // Get the display name
        String counterDisplayName =
-           getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex);
-       counterDisplayName = unescape(counterDisplayName);
+           StringInterner.weakIntern(getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex));
+       counterDisplayName = StringInterner.weakIntern(unescape(counterDisplayName));
 
        // Get the value
        long value =
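A rough usage sketch (again, not part of the commit) of the CountersStrings code path touched above, via the old-API Counters escaped compact string round trip; the class name and the group/counter names are made up for illustration:

    // Illustrative only: serialize counters to the escaped compact string
    // form and parse them back; the parse side now interns the names.
    import org.apache.hadoop.mapred.Counters;

    public class CountersRoundTrip {
      public static void main(String[] args) throws Exception {
        Counters counters = new Counters();
        counters.incrCounter("example-group", "example-counter", 42L);

        String compact = counters.makeEscapedCompactString();
        Counters parsed = Counters.fromEscapedCompactString(compact);

        // Prints 42: the value survives the round trip, and the parsed
        // group and counter names are now weakly interned strings.
        System.out.println(
            parsed.findCounter("example-group", "example-counter").getValue());
      }
    }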