Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java Thu Oct 11 06:14:26 2012
@@ -49,6 +49,7 @@ import org.apache.hadoop.mapreduce.jobhi
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.app.MRApp;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -59,6 +60,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
 import org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory;
 import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
 import org.apache.hadoop.net.DNSToSwitchMapping;
@@ -402,6 +404,108 @@ public class TestJobHistoryParsing {
     }
   }
   
+  @Test
+  public void testCountersForFailedTask() throws Exception {
+    LOG.info("STARTING testCountersForFailedTask");
+    try {
+    Configuration conf = new Configuration();
+    conf
+        .setClass(
+            CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
+            MyResolver.class, DNSToSwitchMapping.class);
+    RackResolver.init(conf);
+    MRApp app = new MRAppWithHistoryWithFailedTask(2, 1, true,
+        this.getClass().getName(), true);
+    app.submit(conf);
+    Job job = app.getContext().getAllJobs().values().iterator().next();
+    JobId jobId = job.getID();
+    app.waitForState(job, JobState.FAILED);
+
+    // make sure all events are flushed
+    app.waitForState(Service.STATE.STOPPED);
+
+    String jobhistoryDir = JobHistoryUtils
+        .getHistoryIntermediateDoneDirForUser(conf);
+    JobHistory jobHistory = new JobHistory();
+    jobHistory.init(conf);
+
+    JobIndexInfo jobIndexInfo = jobHistory.getJobFileInfo(jobId)
+        .getJobIndexInfo();
+    String jobhistoryFileName = FileNameIndexUtils
+        .getDoneFileName(jobIndexInfo);
+
+    Path historyFilePath = new Path(jobhistoryDir, jobhistoryFileName);
+    FSDataInputStream in = null;
+    FileContext fc = null;
+    try {
+      fc = FileContext.getFileContext(conf);
+      in = fc.open(fc.makeQualified(historyFilePath));
+    } catch (IOException ioe) {
+      LOG.info("Can not open history file: " + historyFilePath, ioe);
+      throw new Exception("Cannot open history file");
+    }
+
+    JobHistoryParser parser = new JobHistoryParser(in);
+    JobInfo jobInfo = parser.parse();
+    Exception parseException = parser.getParseException();
+    Assert.assertNull("Caught an expected exception " + parseException,
+        parseException);
+    for (Map.Entry<TaskID,TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
+      TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
+      CompletedTask ct = new CompletedTask(yarnTaskID, entry.getValue());
+      Assert.assertNotNull("completed task report has null counters",
+          ct.getReport().getCounters());
+    }
+    } finally {
+      LOG.info("FINISHED testCountersForFailedTask");
+    }
+  }
+
+  @Test
+  public void testScanningOldDirs() throws Exception {
+    LOG.info("STARTING testScanningOldDirs");
+    try {
+    Configuration conf = new Configuration();
+    conf
+        .setClass(
+            CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
+            MyResolver.class, DNSToSwitchMapping.class);
+    RackResolver.init(conf);
+    MRApp app =
+        new MRAppWithHistory(1, 1, true,
+            this.getClass().getName(), true);
+    app.submit(conf);
+    Job job = app.getContext().getAllJobs().values().iterator().next();
+    JobId jobId = job.getID();
+    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
+    app.waitForState(job, JobState.SUCCEEDED);
+
+    // make sure all events are flushed
+    app.waitForState(Service.STATE.STOPPED);
+
+    HistoryFileManagerForTest hfm = new HistoryFileManagerForTest();
+    hfm.init(conf);
+    HistoryFileInfo fileInfo = hfm.getFileInfo(jobId);
+    Assert.assertNotNull("Unable to locate job history", fileInfo);
+
+    // force the manager to "forget" the job
+    hfm.deleteJobFromJobListCache(fileInfo);
+    final int msecPerSleep = 10;
+    int msecToSleep = 10 * 1000;
+    while (fileInfo.isMovePending() && msecToSleep > 0) {
+      Assert.assertTrue(!fileInfo.didMoveFail());
+      msecToSleep -= msecPerSleep;
+      Thread.sleep(msecPerSleep);
+    }
+    Assert.assertTrue("Timeout waiting for history move", msecToSleep > 0);
+
+    fileInfo = hfm.getFileInfo(jobId);
+    Assert.assertNotNull("Unable to locate old job history", fileInfo);
+    } finally {
+      LOG.info("FINISHED testScanningOldDirs");
+    }
+  }
+
   static class MRAppWithHistoryWithFailedAttempt extends MRAppWithHistory {
 
    public MRAppWithHistoryWithFailedAttempt(int maps, int reduces, boolean autoComplete,
@@ -422,6 +526,32 @@ public class TestJobHistoryParsing {
     }
   }
 
+  static class MRAppWithHistoryWithFailedTask extends MRAppWithHistory {
+
+    public MRAppWithHistoryWithFailedTask(int maps, int reduces, boolean autoComplete,
+        String testName, boolean cleanOnStart) {
+      super(maps, reduces, autoComplete, testName, cleanOnStart);
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    protected void attemptLaunched(TaskAttemptId attemptID) {
+      if (attemptID.getTaskId().getId() == 0) {
+        getContext().getEventHandler().handle(
+            new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_FAILMSG));
+      } else {
+        getContext().getEventHandler().handle(
+            new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE));
+      }
+    }
+  }
+
+  static class HistoryFileManagerForTest extends HistoryFileManager {
+    void deleteJobFromJobListCache(HistoryFileInfo fileInfo) {
+      jobListCache.delete(fileInfo);
+    }
+  }
+
   public static void main(String[] args) throws Exception {
     TestJobHistoryParsing t = new TestJobHistoryParsing();
     t.testHistoryParsing();
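
For context on the testScanningOldDirs wait loop added above: it is a
bounded-polling idiom (check, sleep, decrement a time budget) that several
of these history-server tests share. A minimal standalone sketch of the
idiom follows; the WaitCondition interface and PollUtil class are
illustrative names, not part of this commit.

    // Illustrative bounded-poll helper, not part of the commit above.
    interface WaitCondition {
      boolean met();
    }

    class PollUtil {
      // Polls cond for up to timeoutMsec, sleeping intervalMsec between
      // checks; returns true only if the condition was met in time.
      static boolean waitFor(WaitCondition cond, int timeoutMsec,
          int intervalMsec) throws InterruptedException {
        int remaining = timeoutMsec;
        while (!cond.met() && remaining > 0) {
          Thread.sleep(intervalMsec);
          remaining -= intervalMsec;
        }
        return cond.met();
      }
    }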

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml Thu Oct 11 06:14:26 2012
@@ -102,7 +102,7 @@
 <property><!--Loaded from job.xml--><name>dfs.permissions.enabled</name><value>true</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.tasktracker.taskcontroller</name><value>org.apache.hadoop.mapred.DefaultTaskController</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.reduce.shuffle.parallelcopies</name><value>5</value></property>
-<property><!--Loaded from job.xml--><name>yarn.nodemanager.env-whitelist</name><value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,YARN_HOME</value></property>
+<property><!--Loaded from job.xml--><name>yarn.nodemanager.env-whitelist</name><value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,HADOOP_YARN_HOME</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.jobtracker.heartbeats.in.second</name><value>100</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.job.maxtaskfailures.per.tracker</name><value>4</value></property>
 <property><!--Loaded from job.xml--><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
@@ -317,8 +317,8 @@
         $HADOOP_COMMON_HOME/share/hadoop/common/lib/*,
         $HADOOP_HDFS_HOME/share/hadoop/hdfs/*,
         $HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,
-        $YARN_HOME/share/hadoop/mapreduce/*,
-        $YARN_HOME/share/hadoop/mapreduce/lib/*
+        $HADOOP_YARN_HOME/share/hadoop/mapreduce/*,
+        $HADOOP_YARN_HOME/share/hadoop/mapreduce/lib/*
      </value></property>
 <property><!--Loaded from job.xml--><name>yarn.nodemanager.log-aggregation.compression-type</name><value>gz</value></property>
 <property><!--Loaded from job.xml--><name>dfs.image.compress</name><value>false</value></property>

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java Thu Oct 11 06:14:26 2012
@@ -346,9 +346,13 @@ public class YARNRunner implements Clien
             jobConfPath, LocalResourceType.FILE));
     if (jobConf.get(MRJobConfig.JAR) != null) {
       Path jobJarPath = new Path(jobConf.get(MRJobConfig.JAR));
-      localResources.put(MRJobConfig.JOB_JAR,
-          createApplicationResource(defaultFileContext,
-              jobJarPath, LocalResourceType.ARCHIVE));
+      LocalResource rc = createApplicationResource(defaultFileContext,
+          jobJarPath, 
+          LocalResourceType.PATTERN);
+      String pattern = conf.getPattern(JobContext.JAR_UNPACK_PATTERN, 
+          JobConf.UNPACK_JAR_PATTERN_DEFAULT).pattern();
+      rc.setPattern(pattern);
+      localResources.put(MRJobConfig.JOB_JAR, rc);
     } else {
       // Job jar may be null. For e.g, for pipes, the job jar is the hadoop
       // mapreduce jar itself which is already on the classpath.
@@ -368,12 +372,9 @@ public class YARNRunner implements Clien
     }
 
     // Setup security tokens
-    ByteBuffer securityTokens = null;
-    if (UserGroupInformation.isSecurityEnabled()) {
-      DataOutputBuffer dob = new DataOutputBuffer();
-      ts.writeTokenStorageToStream(dob);
-      securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
-    }
+    DataOutputBuffer dob = new DataOutputBuffer();
+    ts.writeTokenStorageToStream(dob);
+    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
 
     // Setup the command to run the AM
     List<String> vargs = new ArrayList<String>(8);
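
For context on the first YARNRunner hunk above: switching the job jar from
LocalResourceType.ARCHIVE to LocalResourceType.PATTERN makes the
NodeManager unpack only the jar entries that match the supplied regex at
localization time. A minimal sketch of building such a resource is below;
it assumes the YARN records API on this branch and elides the
URL/size/timestamp setup that createApplicationResource performs.

    import org.apache.hadoop.yarn.api.records.LocalResource;
    import org.apache.hadoop.yarn.api.records.LocalResourceType;
    import org.apache.hadoop.yarn.util.Records;

    public class PatternResourceSketch {
      // Builds a PATTERN-typed LocalResource; only entries matching the
      // regex (e.g. "(?:classes/|lib/).*") are unpacked on localization.
      static LocalResource newJobJarResource(String unpackRegex) {
        LocalResource rc = Records.newRecord(LocalResource.class);
        rc.setType(LocalResourceType.PATTERN); // was ARCHIVE before
        rc.setPattern(unpackRegex);
        return rc;
      }
    }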

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java Thu Oct 11 06:14:26 2012
@@ -21,19 +21,25 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Comparator;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.serializer.JavaSerialization;
+import org.apache.hadoop.io.serializer.JavaSerializationComparator;
+import org.apache.hadoop.io.serializer.Serialization;
+import org.apache.hadoop.io.serializer.WritableSerialization;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public class TestTotalOrderPartitioner extends TestCase {
@@ -51,6 +57,19 @@ public class TestTotalOrderPartitioner e
     new Text("yak"),   // 9
   };
 
+  private static final String[] splitJavaStrings = new String[] {
+    // -inf            // 0
+    new String("aabbb"), // 1
+    new String("babbb"), // 2
+    new String("daddd"), // 3
+    new String("dddee"), // 4
+    new String("ddhee"), // 5
+    new String("dingo"), // 6
+    new String("hijjj"), // 7
+    new String("n"),     // 8
+    new String("yak"),   // 9
+  };
+
   static class Check<T> {
     T data;
     int part;
@@ -76,19 +95,41 @@ public class TestTotalOrderPartitioner e
     testStrings.add(new Check<Text>(new Text("hi"), 6));
   };
 
-  private static <T extends WritableComparable<?>> Path writePartitionFile(
+  private static final ArrayList<Check<String>> testJavaStrings =
+      new ArrayList<Check<String>>();
+  static {
+    testJavaStrings.add(new Check<String>(new String("aaaaa"), 0));
+    testJavaStrings.add(new Check<String>(new String("aaabb"), 0));
+    testJavaStrings.add(new Check<String>(new String("aabbb"), 1));
+    testJavaStrings.add(new Check<String>(new String("aaaaa"), 0));
+    testJavaStrings.add(new Check<String>(new String("babbb"), 2));
+    testJavaStrings.add(new Check<String>(new String("baabb"), 1));
+    testJavaStrings.add(new Check<String>(new String("yai"), 8));
+    testJavaStrings.add(new Check<String>(new String("yak"), 9));
+    testJavaStrings.add(new Check<String>(new String("z"), 9));
+    testJavaStrings.add(new Check<String>(new String("ddngo"), 5));
+    testJavaStrings.add(new Check<String>(new String("hi"), 6));
+  };
+
+
+  private static <T> Path writePartitionFile(
       String testname, Configuration conf, T[] splits) throws IOException {
     final FileSystem fs = FileSystem.getLocal(conf);
     final Path testdir = new Path(System.getProperty("test.build.data", "/tmp")
-                                 ).makeQualified(fs);
+                                 ).makeQualified(
+                                     fs.getUri(),
+                                     fs.getWorkingDirectory());
     Path p = new Path(testdir, testname + "/_partition.lst");
     TotalOrderPartitioner.setPartitionFile(conf, p);
     conf.setInt(MRJobConfig.NUM_REDUCES, splits.length + 1);
     SequenceFile.Writer w = null;
     try {
-      w = SequenceFile.createWriter(fs, conf, p,
-          splits[0].getClass(), NullWritable.class,
-          SequenceFile.CompressionType.NONE);
+      w = SequenceFile.createWriter(
+          conf,
+          SequenceFile.Writer.file(p),
+          SequenceFile.Writer.keyClass(splits[0].getClass()),
+          SequenceFile.Writer.valueClass(NullWritable.class),
+          SequenceFile.Writer.compression(CompressionType.NONE));
       for (int i = 0; i < splits.length; ++i) {
         w.append(splits[i], NullWritable.get());
       }
@@ -99,6 +140,31 @@ public class TestTotalOrderPartitioner e
     return p;
   }
 
+  public void testTotalOrderWithCustomSerialization() throws Exception {
+    TotalOrderPartitioner<String, NullWritable> partitioner =
+        new TotalOrderPartitioner<String, NullWritable>();
+    Configuration conf = new Configuration();
+    conf.setStrings(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
+        JavaSerialization.class.getName(),
+        WritableSerialization.class.getName());
+    conf.setClass(MRJobConfig.KEY_COMPARATOR,
+        JavaSerializationComparator.class,
+        Comparator.class);
+    Path p = TestTotalOrderPartitioner.<String>writePartitionFile(
+        "totalordercustomserialization", conf, splitJavaStrings);
+    conf.setClass(MRJobConfig.MAP_OUTPUT_KEY_CLASS, String.class, Object.class);
+    try {
+      partitioner.setConf(conf);
+      NullWritable nw = NullWritable.get();
+      for (Check<String> chk : testJavaStrings) {
+        assertEquals(chk.data.toString(), chk.part,
+            partitioner.getPartition(chk.data, nw, splitJavaStrings.length + 1));
+      }
+    } finally {
+      p.getFileSystem(conf).delete(p, true);
+    }
+  }
+
   public void testTotalOrderMemCmp() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
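
For context on the writePartitionFile hunk above: the old
SequenceFile.createWriter(fs, conf, path, keyClass, valueClass,
compression) overload is replaced by the Writer.Option-based factory, and
the method's type bound is relaxed from WritableComparable so that
Java-serialized keys such as String can be written. A minimal
self-contained sketch of the option-based call; the /tmp path and Text key
are illustrative only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.Text;

    public class WriterSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path p = new Path("/tmp/_partition.lst"); // illustrative path
        SequenceFile.Writer w = SequenceFile.createWriter(conf,
            SequenceFile.Writer.file(p),
            SequenceFile.Writer.keyClass(Text.class),
            SequenceFile.Writer.valueClass(NullWritable.class),
            SequenceFile.Writer.compression(CompressionType.NONE));
        try {
          w.append(new Text("split-key"), NullWritable.get());
        } finally {
          w.close();
        }
      }
    }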

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java Thu Oct 11 06:14:26 2012
@@ -27,7 +27,6 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.Iterator;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
@@ -82,6 +81,7 @@ public class DBCountPageView extends Con
   
   private Connection connection;
   private boolean initialized = false;
+  private boolean isOracle = false;
 
   private static final String[] AccessFieldNames = {"url", "referrer", "time"};
   private static final String[] PageviewFieldNames = {"url", "pageview"};
@@ -102,7 +102,9 @@ public class DBCountPageView extends Con
   
   private void createConnection(String driverClassName
       , String url) throws Exception {
-    
+    if(driverClassName.toLowerCase().contains("oracle")) {
+      isOracle = true;
+    }
     Class.forName(driverClassName);
     connection = DriverManager.getConnection(url);
     connection.setAutoCommit(false);
@@ -142,7 +144,7 @@ public class DBCountPageView extends Con
   }
   
   private void dropTables() {
-    String dropAccess = "DROP TABLE Access";
+    String dropAccess = "DROP TABLE HAccess";
     String dropPageview = "DROP TABLE Pageview";
     Statement st = null;
     try {
@@ -157,18 +159,21 @@ public class DBCountPageView extends Con
   }
   
   private void createTables() throws SQLException {
-
+    String dataType = "BIGINT NOT NULL";
+    if (isOracle) {
+      dataType = "NUMBER(19) NOT NULL";
+    }
     String createAccess = 
       "CREATE TABLE " +
-      "Access(url      VARCHAR(100) NOT NULL," +
+      "HAccess(url      VARCHAR(100) NOT NULL," +
             " referrer VARCHAR(100)," +
-            " time     BIGINT NOT NULL, " +
+            " time     " + dataType + ", " +
             " PRIMARY KEY (url, time))";
 
     String createPageview = 
       "CREATE TABLE " +
       "Pageview(url      VARCHAR(100) NOT NULL," +
-              " pageview     BIGINT NOT NULL, " +
+              " pageview     " + dataType + ", " +
                " PRIMARY KEY (url))";
     
     Statement st = connection.createStatement();
@@ -189,7 +194,7 @@ public class DBCountPageView extends Con
     PreparedStatement statement = null ;
     try {
       statement = connection.prepareStatement(
-          "INSERT INTO Access(url, referrer, time)" +
+          "INSERT INTO HAccess(url, referrer, time)" +
           " VALUES (?, ?, ?)");
 
       Random random = new Random();
@@ -248,7 +253,7 @@ public class DBCountPageView extends Con
   /**Verifies the results are correct */
   private boolean verify() throws SQLException {
     //check total num pageview
-    String countAccessQuery = "SELECT COUNT(*) FROM Access";
+    String countAccessQuery = "SELECT COUNT(*) FROM HAccess";
     String sumPageviewQuery = "SELECT SUM(pageview) FROM Pageview";
     Statement st = null;
     ResultSet rs = null;
@@ -396,7 +401,7 @@ public class DBCountPageView extends Con
 
     DBConfiguration.configureDB(conf, driverClassName, url);
 
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
         
     job.setJobName("Count Pageviews of URLs");
     job.setJarByClass(DBCountPageView.class);
@@ -404,7 +409,7 @@ public class DBCountPageView extends Con
     job.setCombinerClass(LongSumReducer.class);
     job.setReducerClass(PageviewReducer.class);
 
-    DBInputFormat.setInput(job, AccessRecord.class, "Access"
+    DBInputFormat.setInput(job, AccessRecord.class, "HAccess"
         , null, "url", AccessFieldNames);
 
     DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);
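
For context on the DBCountPageView hunks above: ACCESS is a reserved word
in Oracle, hence the table rename to HAccess, and Oracle lacks a native
BIGINT type, hence the NUMBER(19) mapping. A minimal sketch of the dialect
switch as a standalone helper (the class and method names are
illustrative):

    public class SqlDialectSketch {
      // Picks a 64-bit integer column type from the JDBC driver class
      // name, mirroring the isOracle flag introduced above.
      static String longColumnType(String driverClassName) {
        boolean isOracle = driverClassName.toLowerCase().contains("oracle");
        return isOracle ? "NUMBER(19) NOT NULL" : "BIGINT NOT NULL";
      }
    }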

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java Thu Oct 11 06:14:26 2012
@@ -165,16 +165,30 @@ public class DistributedPentomino extend
   }
 
   public int run(String[] args) throws Exception {
+    Configuration conf = getConf();
     if (args.length == 0) {
-      System.out.println("pentomino <output>");
+      System.out.println("Usage: pentomino <output> [-depth #] [-height #] 
[-width #]");
       ToolRunner.printGenericCommandUsage(System.out);
       return 2;
     }
-
-    Configuration conf = getConf();
-    int width = conf.getInt(Pentomino.WIDTH, PENT_WIDTH);
-    int height = conf.getInt(Pentomino.HEIGHT, PENT_HEIGHT);
-    int depth = conf.getInt(Pentomino.DEPTH, PENT_DEPTH);
+    // check for passed parameters, otherwise use defaults
+    int width = PENT_WIDTH;
+    int height = PENT_HEIGHT;
+    int depth = PENT_DEPTH;
+    for (int i = 0; i < args.length; i++) {
+      if (args[i].equalsIgnoreCase("-depth")) {
+        depth = Integer.parseInt(args[++i].trim());
+      } else if (args[i].equalsIgnoreCase("-height")) {
+        height = Integer.parseInt(args[++i].trim());
+      } else if (args[i].equalsIgnoreCase("-width")) {
+        width = Integer.parseInt(args[++i].trim());
+      }
+    }
+    // set the parsed values in conf for the M/R tasks to read; this
+    // guarantees they are present for every task (see MAPREDUCE-4678)
+    conf.setInt(Pentomino.WIDTH, width);
+    conf.setInt(Pentomino.HEIGHT, height);
+    conf.setInt(Pentomino.DEPTH, depth);
     Class<? extends Pentomino> pentClass = conf.getClass(Pentomino.CLASS, 
       OneSidedPentomino.class, Pentomino.class);
     int numMaps = conf.getInt(MRJobConfig.NUM_MAPS, DEFAULT_MAPS);
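
For context on the run() hunk above: each -width/-height/-depth flag
consumes the token that follows it as its value, and the parsed values are
written back into the configuration so that map tasks see the same board
dimensions (the MAPREDUCE-4678 fix). A standalone sketch of the parse,
with illustrative defaults rather than the example's PENT_* constants:

    public class PentominoArgsSketch {
      // Parses -width/-height/-depth flags; each flag's value is the
      // next token. Unrecognized tokens are ignored, as in the example.
      static int[] parseDims(String[] args) {
        int width = 10, height = 6, depth = 3; // illustrative defaults
        for (int i = 0; i < args.length - 1; i++) {
          if ("-depth".equalsIgnoreCase(args[i])) {
            depth = Integer.parseInt(args[++i].trim());
          } else if ("-height".equalsIgnoreCase(args[i])) {
            height = Integer.parseInt(args[++i].trim());
          } else if ("-width".equalsIgnoreCase(args[i])) {
            width = Integer.parseInt(args[++i].trim());
          }
        }
        return new int[] { width, height, depth };
      }
    }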

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml Thu Oct 11 06:14:26 2012
@@ -149,7 +149,6 @@
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.jboss.netty</groupId>

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/c++/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/c++:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/block_forensics/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/build.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/data_join/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/index/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/vaidya/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/examples/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/examples:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/java:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:r1390199-1396916

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:r1390199-1396916

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java Thu Oct 11 06:14:26 2012
@@ -98,7 +98,7 @@ public class TestJobQueueInformation ext
     dfsCluster.shutdown();
   }
 
-  public void testJobQueues() throws IOException {
+  public void testJobQueues() throws Exception {
     JobClient jc = new JobClient(mrCluster.createJobConf());
     String expectedQueueInfo = "Maximum Tasks Per Job :: 10";
     JobQueueInfo[] queueInfos = jc.getQueues();

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java Thu Oct 11 06:14:26 2012
@@ -149,7 +149,7 @@ public class TestSetupAndCleanupFailure 
   private void testSetupAndCleanupKill(MiniMRCluster mr, 
                                        MiniDFSCluster dfs, 
                                        boolean commandLineKill) 
-  throws IOException {
+  throws Exception {
     // launch job with waiting setup/cleanup
     RunningJob job = launchJobWithWaitingSetupAndCleanup(mr);
     
@@ -223,7 +223,7 @@ public class TestSetupAndCleanupFailure 
   // Also Tests the command-line kill for setup/cleanup attempts. 
   // tests the setup/cleanup attempts getting killed if 
   // they were running on a lost tracker
-  public void testWithDFS() throws IOException {
+  public void testWithDFS() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;
     FileSystem fileSys = null;

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java Thu Oct 11 06:14:26 2012
@@ -449,7 +449,7 @@ public class UtilsForTests {
   static void signalTasks(MiniDFSCluster dfs, FileSystem fileSys, 
                           String mapSignalFile, 
                           String reduceSignalFile, int replication) 
-  throws IOException {
+  throws Exception {
     writeFile(dfs.getNameNode(), fileSys.getConf(), new Path(mapSignalFile), 
               (short)replication);
    writeFile(dfs.getNameNode(), fileSys.getConf(), new Path(reduceSignalFile),
@@ -462,7 +462,7 @@ public class UtilsForTests {
   static void signalTasks(MiniDFSCluster dfs, FileSystem fileSys, 
                           boolean isMap, String mapSignalFile, 
                           String reduceSignalFile)
-  throws IOException {
+  throws Exception {
     //  signal the maps to complete
     writeFile(dfs.getNameNode(), fileSys.getConf(),
               isMap 
@@ -483,7 +483,7 @@ public class UtilsForTests {
   }
   
   static void writeFile(NameNode namenode, Configuration conf, Path name, 
-      short replication) throws IOException {
+      short replication) throws Exception {
     FileSystem fileSys = FileSystem.get(conf);
     SequenceFile.Writer writer = 
       SequenceFile.createWriter(fileSys, conf, name, 

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/webapps/job/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:r1390199-1396916

