MAPREDUCE-6983. Moving logging APIs over to slf4j in hadoop-mapreduce-client-core. Contributed by Jinjiang Ling.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/178751ed
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/178751ed
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/178751ed

Branch: refs/heads/YARN-6592
Commit: 178751ed8c9d47038acf8616c226f1f52e884feb
Parents: 940ffe3
Author: Akira Ajisaka <aajis...@apache.org>
Authored: Thu Nov 2 17:42:52 2017 +0900
Committer: Akira Ajisaka <aajis...@apache.org>
Committed: Thu Nov 2 17:43:08 2017 +0900

----------------------------------------------------------------------
 .../org/apache/hadoop/mapred/BackupStore.java     |  7 ++++---
 .../org/apache/hadoop/mapred/CleanupQueue.java    |  8 ++++----
 .../java/org/apache/hadoop/mapred/Counters.java   |  4 ++--
 .../DeprecatedQueueConfigurationParser.java       |  8 ++++----
 .../org/apache/hadoop/mapred/FileInputFormat.java |  8 ++++----
 .../apache/hadoop/mapred/FileOutputCommitter.java |  6 +++---
 .../main/java/org/apache/hadoop/mapred/IFile.java |  6 +++---
 .../apache/hadoop/mapred/IFileInputStream.java    |  7 ++++---
 .../java/org/apache/hadoop/mapred/IndexCache.java |  6 +++---
 .../org/apache/hadoop/mapred/JobACLsManager.java  |  6 +++---
 .../java/org/apache/hadoop/mapred/JobConf.java    |  6 +++---
 .../org/apache/hadoop/mapred/JobEndNotifier.java  |  8 ++++----
 .../java/org/apache/hadoop/mapred/JvmContext.java |  8 ++++----
 .../apache/hadoop/mapred/LineRecordReader.java    |  8 ++++----
 .../java/org/apache/hadoop/mapred/MapTask.java    |  7 ++++---
 .../java/org/apache/hadoop/mapred/Merger.java     |  6 +++---
 .../main/java/org/apache/hadoop/mapred/Queue.java | 10 +++++-----
 .../hadoop/mapred/QueueConfigurationParser.java   |  8 ++++----
 .../org/apache/hadoop/mapred/QueueManager.java    |  6 +++---
 .../java/org/apache/hadoop/mapred/ReduceTask.java |  9 +++++----
 .../org/apache/hadoop/mapred/SortedRanges.java    |  8 ++++----
 .../main/java/org/apache/hadoop/mapred/Task.java  | 18 +++++++++---------
 .../java/org/apache/hadoop/mapred/TaskLog.java    | 11 +++++------
 .../java/org/apache/hadoop/mapred/TaskStatus.java |  8 ++++----
 .../org/apache/hadoop/mapred/jobcontrol/Job.java  |  6 +++---
 .../mapred/lib/FieldSelectionMapReduce.java       |  7 ++++---
 .../apache/hadoop/mapred/lib/InputSampler.java    |  6 +++---
 .../hadoop/mapred/lib/MultithreadedMapRunner.java |  8 ++++----
 .../apache/hadoop/mapred/pipes/Application.java   |  7 ++++---
 .../hadoop/mapred/pipes/BinaryProtocol.java       |  8 ++++----
 .../apache/hadoop/mapred/pipes/PipesReducer.java  |  7 ++++---
 .../org/apache/hadoop/mapred/pipes/Submitter.java |  6 +++---
 .../java/org/apache/hadoop/mapreduce/Cluster.java |  7 ++++---
 .../org/apache/hadoop/mapreduce/CryptoUtils.java  |  6 +++---
 .../java/org/apache/hadoop/mapreduce/Job.java     |  6 +++---
 .../hadoop/mapreduce/JobResourceUploader.java     |  7 ++++---
 .../hadoop/mapreduce/JobSubmissionFiles.java      |  7 ++++---
 .../org/apache/hadoop/mapreduce/JobSubmitter.java | 11 +++++------
 .../mapreduce/counters/AbstractCounters.java      |  7 ++++---
 .../counters/FileSystemCounterGroup.java          |  7 ++++---
 .../mapreduce/counters/FrameworkCounterGroup.java |  7 ++++---
 .../hadoop/mapreduce/jobhistory/EventWriter.java  |  8 ++++----
 .../mapreduce/jobhistory/JobHistoryParser.java    |  7 ++++---
 .../mapreduce/lib/db/BigDecimalSplitter.java      |  7 ++++---
 .../hadoop/mapreduce/lib/db/DBInputFormat.java    |  7 ++++---
 .../hadoop/mapreduce/lib/db/DBOutputFormat.java   |  7 ++++---
 .../hadoop/mapreduce/lib/db/DBRecordReader.java   |  7 ++++---
 .../mapreduce/lib/db/DataDrivenDBInputFormat.java |  7 ++++---
 .../lib/db/DataDrivenDBRecordReader.java          |  7 ++++---
 .../hadoop/mapreduce/lib/db/DateSplitter.java     |  6 +++---
 .../hadoop/mapreduce/lib/db/FloatSplitter.java    |  7 ++++---
 .../mapreduce/lib/db/OracleDBRecordReader.java    |  7 ++++---
 .../lib/db/OracleDataDrivenDBInputFormat.java     |  3 ---
 .../hadoop/mapreduce/lib/db/TextSplitter.java     |  6 +++---
 .../lib/fieldsel/FieldSelectionMapper.java        |  7 ++++---
 .../lib/fieldsel/FieldSelectionReducer.java       |  7 ++++---
 .../lib/input/CombineFileInputFormat.java         |  7 ++++---
 .../mapreduce/lib/input/FileInputFormat.java      |  7 ++++---
 .../lib/input/FixedLengthRecordReader.java        |  8 ++++----
 .../mapreduce/lib/input/LineRecordReader.java     |  7 ++++---
 .../lib/input/SequenceFileInputFilter.java        |  9 +++++----
 .../mapreduce/lib/jobcontrol/ControlledJob.java   |  7 ++++---
 .../mapreduce/lib/jobcontrol/JobControl.java      |  6 +++---
 .../mapreduce/lib/map/MultithreadedMapper.java    |  7 ++++---
 .../mapreduce/lib/output/FileOutputCommitter.java |  7 ++++---
 .../lib/output/PartialFileOutputCommitter.java    |  8 ++++----
 .../mapreduce/lib/partition/InputSampler.java     |  6 +++---
 .../lib/partition/KeyFieldBasedPartitioner.java   |  6 +++---
 .../lib/partition/TotalOrderPartitioner.java      |  9 +++++----
 .../mapreduce/security/SecureShuffleUtils.java    |  7 ++++---
 .../hadoop/mapreduce/security/TokenCache.java     |  6 +++---
 .../hadoop/mapreduce/split/JobSplitWriter.java    |  7 ++++---
 .../mapreduce/task/reduce/EventFetcher.java       |  6 +++---
 .../hadoop/mapreduce/task/reduce/Fetcher.java     | 10 +++++-----
 .../mapreduce/task/reduce/InMemoryMapOutput.java  |  9 +++++----
 .../mapreduce/task/reduce/LocalFetcher.java       |  8 ++++----
 .../mapreduce/task/reduce/MergeManagerImpl.java   |  7 ++++---
 .../hadoop/mapreduce/task/reduce/MergeThread.java |  6 +++---
 .../mapreduce/task/reduce/OnDiskMapOutput.java    |  9 +++++----
 .../task/reduce/ShuffleSchedulerImpl.java         |  9 +++++----
 .../org/apache/hadoop/mapreduce/tools/CLI.java    |  8 ++++----
 .../apache/hadoop/mapreduce/util/ProcessTree.java |  7 +++----
 .../org/apache/hadoop/mapred/TestCounters.java    |  6 +++---
 .../apache/hadoop/mapred/TestFileInputFormat.java |  7 ++++---
 .../TestClientDistributedCacheManager.java        |  8 ++++----
 .../jobhistory/TestHistoryViewerPrinter.java      |  8 ++++----
 .../mapreduce/lib/input/TestFileInputFormat.java  |  7 ++++---
 .../lib/output/TestFileOutputCommitter.java       |  8 ++++----
 .../hadoop/mapreduce/task/reduce/TestFetcher.java |  6 +++---
 .../hadoop/io/TestSequenceFileMergeProgress.java  |  4 ++--
 .../TestSequenceFileAsBinaryInputFormat.java      |  4 ++--
 .../mapred/TestSequenceFileAsTextInputFormat.java |  4 ++--
 .../mapred/TestSequenceFileInputFilter.java       |  4 ++--
 .../mapred/TestSequenceFileInputFormat.java       |  4 ++--
 94 files changed, 352 insertions(+), 318 deletions(-)
----------------------------------------------------------------------
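
The change applies the same mechanical substitution to every file listed above:
the commons-logging Log/LogFactory pair is replaced with slf4j's
Logger/LoggerFactory. A minimal before/after sketch of that pattern, using an
illustrative class name rather than one from the patch:

    // Before: commons-logging, as removed throughout this change
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(Example.class);

    // After: slf4j, as introduced throughout this change
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      private static final Logger LOG = LoggerFactory.getLogger(Example.class);

      void report(int numMaps) {
        // slf4j also supports parameterized messages, which skip string
        // concatenation when the level is disabled; the hunks below keep the
        // existing concatenation-style messages and change only the logger type.
        LOG.info("Number of maps: {}", numMaps);
      }
    }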


http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java
index e79ec66..94ad9e0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java
@@ -26,8 +26,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.NoSuchElementException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -45,6 +43,8 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.CryptoUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <code>BackupStore</code> is an utility class that is used to support
@@ -60,7 +60,8 @@ import org.apache.hadoop.mapreduce.CryptoUtils;
 @InterfaceStability.Unstable
 public class BackupStore<K,V> {
 
-  private static final Log LOG = LogFactory.getLog(BackupStore.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BackupStore.class.getName());
   private static final int MAX_VINT_SIZE = 9;
   private static final int EOF_MARKER_SIZE = 2 * MAX_VINT_SIZE;
   private final TaskAttemptID tid;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
index 2282b54..a40a40a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
@@ -21,16 +21,16 @@ package org.apache.hadoop.mapred;
 import java.io.IOException;
 import java.util.concurrent.LinkedBlockingQueue;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
 class CleanupQueue {
 
-  public static final Log LOG =
-    LogFactory.getLog(CleanupQueue.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(CleanupQueue.class);
 
   private static PathCleanupThread cleanupThread;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
index c9e0b9f..1b2ce19 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
@@ -30,7 +30,6 @@ import java.util.HashMap;
 import java.util.Iterator;
 
 import org.apache.commons.collections.IteratorUtils;
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
@@ -44,6 +43,7 @@ import org.apache.hadoop.mapreduce.counters.GenericCounter;
 import org.apache.hadoop.mapreduce.counters.Limits;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter;
+import org.slf4j.Logger;
 
 import com.google.common.collect.Iterators;
 
@@ -596,7 +596,7 @@ public class Counters
    * Logs the current counter values.
    * @param log The log to use.
    */
-  public void log(Log log) {
+  public void log(Logger log) {
     log.info("Counters: " + size());
     for(Group group: this) {
       log.info("  " + group.getDisplayName());
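
Because the public method changes from log(Log) to log(Logger), callers that
used to pass a commons-logging Log must now pass an slf4j Logger. A
hypothetical caller, only to show the shape of the new signature (the class
and method names are illustrative, not from this patch):

    import org.apache.hadoop.mapred.Counters;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class CounterDump {
      private static final Logger LOG = LoggerFactory.getLogger(CounterDump.class);

      static void dump(Counters counters) {
        counters.log(LOG);  // previously expected org.apache.commons.logging.Log
      }
    }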

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/DeprecatedQueueConfigurationParser.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/DeprecatedQueueConfigurationParser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/DeprecatedQueueConfigurationParser.java
index 1f05869..ddfc820 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/DeprecatedQueueConfigurationParser.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/DeprecatedQueueConfigurationParser.java
@@ -23,8 +23,8 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import static org.apache.hadoop.mapred.QueueManager.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.List;
 import java.util.Map;
@@ -37,8 +37,8 @@ import java.util.ArrayList;
  * 
  */
 class DeprecatedQueueConfigurationParser extends QueueConfigurationParser {
-  private static final Log LOG =
-    LogFactory.getLog(DeprecatedQueueConfigurationParser.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DeprecatedQueueConfigurationParser.class);
   static final String MAPRED_QUEUE_NAMES_KEY = "mapred.queue.names";
 
   DeprecatedQueueConfigurationParser(Configuration conf) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
index 3e0ea25..b0ec979 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
@@ -30,8 +30,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.BlockLocation;
@@ -50,6 +48,8 @@ import org.apache.hadoop.util.StopWatch;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.collect.Iterables;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** 
  * A base class for file-based {@link InputFormat}.
@@ -68,8 +68,8 @@ import com.google.common.collect.Iterables;
 @InterfaceStability.Stable
 public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {
 
-  public static final Log LOG =
-    LogFactory.getLog(FileInputFormat.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(FileInputFormat.class);
   
   @Deprecated
   public enum Counter {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java
index c44bb37..64f346d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** An {@link OutputCommitter} that commits files specified 
  * in job output directory i.e. ${mapreduce.output.fileoutputformat.outputdir}.
@@ -34,7 +34,7 @@ import org.apache.hadoop.fs.Path;
 @InterfaceStability.Stable
 public class FileOutputCommitter extends OutputCommitter {
 
-  public static final Log LOG = LogFactory.getLog(
+  public static final Logger LOG = LoggerFactory.getLogger(
       "org.apache.hadoop.mapred.FileOutputCommitter");
   
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFile.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFile.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFile.java
index 32e07e7..1a917e1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFile.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFile.java
@@ -43,8 +43,8 @@ import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <code>IFile</code> is the simple &lt;key-len, value-len, key, value&gt; format
@@ -56,7 +56,7 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class IFile {
-  private static final Log LOG = LogFactory.getLog(IFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IFile.class);
   public static final int EOF_MARKER = -1; // End of File Marker
   
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java
index 02cbce3..ce1ad5d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java
@@ -25,8 +25,6 @@ import java.io.IOException;
 import java.io.InputStream;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.ChecksumException;
@@ -36,6 +34,8 @@ import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.util.DataChecksum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 /**
  * A checksum input stream, used for IFiles.
  * Used to validate the checksum of files created by {@link IFileOutputStream}.
@@ -59,7 +59,8 @@ public class IFileInputStream extends InputStream {
   private boolean readahead;
   private int readaheadLength;
 
-  public static final Log LOG = LogFactory.getLog(IFileInputStream.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(IFileInputStream.class);
 
   private boolean disableChecksumValidation = false;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
index c3db951..0e24bbe 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
@@ -22,17 +22,17 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 class IndexCache {
 
   private final JobConf conf;
   private final int totalMemoryAllowed;
   private AtomicInteger totalMemoryUsed = new AtomicInteger();
-  private static final Log LOG = LogFactory.getLog(IndexCache.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IndexCache.class);
 
   private final ConcurrentHashMap<String,IndexInformation> cache =
     new ConcurrentHashMap<String,IndexInformation>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobACLsManager.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobACLsManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobACLsManager.java
index 0dbbe5a..7373f7a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobACLsManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobACLsManager.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.mapred;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobACL;
@@ -29,11 +27,13 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 public class JobACLsManager {
 
-  static final Log LOG = LogFactory.getLog(JobACLsManager.class);
+  static final Logger LOG = LoggerFactory.getLogger(JobACLsManager.class);
   Configuration conf;
   private final AccessControlList adminAcl;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
index be8fa9e..8c57d1b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
@@ -24,8 +24,6 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -53,6 +51,8 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.util.ClassUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** 
  * A map/reduce job configuration.
@@ -115,7 +115,7 @@ import org.apache.hadoop.util.Tool;
 @InterfaceStability.Stable
 public class JobConf extends Configuration {
 
-  private static final Log LOG = LogFactory.getLog(JobConf.class);
+  private static final Logger LOG = LoggerFactory.getLogger(JobConf.class);
   private static final Pattern JAVA_OPTS_XMX_PATTERN =
           Pattern.compile(".*(?:^|\\s)-Xmx(\\d+)([gGmMkK]?)(?:$|\\s).*");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java
index 5f03dc8..1ce5254 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java
@@ -24,20 +24,20 @@ import java.net.URISyntaxException;
 import java.util.concurrent.Delayed;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.params.ClientPNames;
 import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.http.params.CoreConnectionPNames;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class JobEndNotifier {
-  private static final Log LOG =
-    LogFactory.getLog(JobEndNotifier.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JobEndNotifier.class.getName());
 
  
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JvmContext.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JvmContext.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JvmContext.java
index 1c2d936..88eeefc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JvmContext.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JvmContext.java
@@ -24,13 +24,13 @@ import java.io.IOException;
 
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 class JvmContext implements Writable {
 
-  public static final Log LOG =
-    LogFactory.getLog(JvmContext.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(JvmContext.class);
   
   JVMId jvmId;
   String pid;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java
index 9802697..bfc6c0e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java
@@ -39,8 +39,8 @@ import org.apache.hadoop.io.compress.SplittableCompressionCodec;
 import org.apache.hadoop.mapreduce.lib.input.CompressedSplitLineReader;
 import org.apache.hadoop.mapreduce.lib.input.SplitLineReader;
 import org.apache.hadoop.mapreduce.lib.input.UncompressedSplitLineReader;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Treats keys as offset in file and value as line. 
@@ -48,8 +48,8 @@ import org.apache.commons.logging.Log;
 @InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"})
 @InterfaceStability.Unstable
 public class LineRecordReader implements RecordReader<LongWritable, Text> {
-  private static final Log LOG
-    = LogFactory.getLog(LineRecordReader.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LineRecordReader.class.getName());
 
   private CompressionCodecFactory compressionCodecs = null;
   private long start;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
index 9ec0914..27c8976 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
@@ -32,8 +32,6 @@ import java.util.List;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -74,6 +72,8 @@ import org.apache.hadoop.util.QuickSort;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** A Map task. */
 @InterfaceAudience.LimitedPrivate({"MapReduce"})
@@ -87,7 +87,8 @@ public class MapTask extends Task {
   private TaskSplitIndex splitMetaInfo = new TaskSplitIndex();
   private final static int APPROX_HEADER_LENGTH = 150;
 
-  private static final Log LOG = LogFactory.getLog(MapTask.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MapTask.class.getName());
 
   private Progress mapPhase;
   private Progress sortPhase;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Merger.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Merger.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Merger.java
index 3667e3c..16f8837 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Merger.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Merger.java
@@ -23,8 +23,6 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -45,6 +43,8 @@ import org.apache.hadoop.mapreduce.CryptoUtils;
 import org.apache.hadoop.util.PriorityQueue;
 import org.apache.hadoop.util.Progress;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Merger is an utility class used by the Map and Reduce tasks for merging
@@ -53,7 +53,7 @@ import org.apache.hadoop.util.Progressable;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class Merger {  
-  private static final Log LOG = LogFactory.getLog(Merger.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Merger.class);
 
   // Local directories
   private static LocalDirAllocator lDirAlloc = 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Queue.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Queue.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Queue.java
index 8f4f3e8..cb88a81 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Queue.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Queue.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -36,7 +36,7 @@ import java.util.TreeSet;
  */
 class Queue implements Comparable<Queue>{
 
-  private static final Log LOG = LogFactory.getLog(Queue.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Queue.class);
 
   //Queue name
   private String name = null;
@@ -348,14 +348,14 @@ class Queue implements Comparable<Queue>{
       //check for the individual children and then see if all of them
       //are updated.
       if (newState.getChildren() == null) {
-        LOG.fatal("In the current state, queue " + getName() + " has "
+        LOG.error("In the current state, queue " + getName() + " has "
             + children.size() + " but the new state has none!");
         return false;
       }
       int childrenSize = children.size();
       int newChildrenSize = newState.getChildren().size();
       if (childrenSize != newChildrenSize) {
-        LOG.fatal("Number of children for queue " + newState.getName()
+        LOG.error("Number of children for queue " + newState.getName()
             + " in newState is " + newChildrenSize + " which is not equal to "
             + childrenSize + " in the current state.");
         return false;
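
The LOG.fatal calls become LOG.error because the slf4j Logger interface has no
FATAL level; error() is the highest severity it offers. A small sketch of the
substitution (illustrative class, message adapted from the hunk above):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class QueueStateCheck {
      private static final Logger LOG = LoggerFactory.getLogger(QueueStateCheck.class);

      void reportMissingChildren(String queueName, int childCount) {
        // was LOG.fatal(...) under commons-logging; slf4j tops out at error()
        LOG.error("In the current state, queue {} has {} but the new state has none!",
            queueName, childCount);
      }
    }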

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueConfigurationParser.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueConfigurationParser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueConfigurationParser.java
index 3e608e0..cbc8e52 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueConfigurationParser.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueConfigurationParser.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.QueueState;
@@ -31,6 +29,8 @@ import org.w3c.dom.NamedNodeMap;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.w3c.dom.DOMException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.DocumentBuilderFactory;
@@ -59,8 +59,8 @@ import java.util.HashSet;
  * Creates the complete queue hieararchy
  */
 class QueueConfigurationParser {
-  private static final Log LOG =
-    LogFactory.getLog(QueueConfigurationParser.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(QueueConfigurationParser.class);
   
   private boolean aclsEnabled = false;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
index d4ea87b..ec43bce 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
@@ -31,6 +29,8 @@ import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedInputStream;
 import java.io.InputStream;
@@ -82,7 +82,7 @@ import java.net.URL;
 @InterfaceAudience.Private
 public class QueueManager {
 
-  private static final Log LOG = LogFactory.getLog(QueueManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(QueueManager.class);
 
   // Map of a queue name and Queue object
   private Map<String, Queue> leafQueues = new HashMap<String,Queue>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java
index c90e38b..b3c5de3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java
@@ -28,8 +28,6 @@ import java.util.Map;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -56,6 +54,8 @@ import org.apache.hadoop.mapreduce.task.reduce.Shuffle;
 import org.apache.hadoop.util.Progress;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** A Reduce task. */
 @InterfaceAudience.Private
@@ -70,7 +70,8 @@ public class ReduceTask extends Task {
        });
   }
   
-  private static final Log LOG = LogFactory.getLog(ReduceTask.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReduceTask.class.getName());
   private int numMaps;
 
   private CompressionCodec codec;
@@ -456,7 +457,7 @@ public class ReduceTask extends Task {
       out.close(reporter);
       out = null;
     } finally {
-      IOUtils.cleanup(LOG, reducer);
+      IOUtils.cleanupWithLogger(LOG, reducer);
       closeQuietly(out, reporter);
     }
   }
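
IOUtils.cleanup(Log, Closeable...) expects a commons-logging Log, so the call
site moves to IOUtils.cleanupWithLogger, the variant that takes an slf4j
Logger. A rough sketch of the pattern, assuming a single stream to dispose of
(the class and variable names are illustrative):

    import java.io.InputStream;

    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class QuietClose {
      private static final Logger LOG = LoggerFactory.getLogger(QuietClose.class);

      void closeQuietly(InputStream in) {
        // logs, rather than propagates, any exception thrown by close()
        IOUtils.cleanupWithLogger(LOG, in);
      }
    }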

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
index cdc2695..9d73e2be 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
@@ -25,9 +25,9 @@ import java.util.Iterator;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Writable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Keeps the Ranges sorted by startIndex.
@@ -37,8 +37,8 @@ import org.apache.hadoop.io.Writable;
  */
 class SortedRanges implements Writable{
   
-  private static final Log LOG = 
-    LogFactory.getLog(SortedRanges.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SortedRanges.class);
   
   private TreeSet<Range> ranges = new TreeSet<Range>();
   private long indicesCount;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
index 542e956..f9bd30b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
@@ -35,8 +35,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import javax.crypto.SecretKey;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
@@ -73,6 +71,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for tasks.
@@ -80,8 +80,8 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceAudience.LimitedPrivate({"MapReduce"})
 @InterfaceStability.Unstable
 abstract public class Task implements Writable, Configurable {
-  private static final Log LOG =
-    LogFactory.getLog(Task.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Task.class);
 
   public static String MERGED_OUTPUT_PREFIX = ".merged";
   public static final long DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS = 10000;
@@ -355,7 +355,7 @@ abstract public class Task implements Writable, Configurable {
    */
   protected void reportFatalError(TaskAttemptID id, Throwable throwable, 
                                   String logMsg) {
-    LOG.fatal(logMsg);
+    LOG.error(logMsg);
     
     if (ShutdownHookManager.get().isShutdownInProgress()) {
       return;
@@ -368,7 +368,7 @@ abstract public class Task implements Writable, Configurable {
     try {
       umbilical.fatalError(id, cause);
     } catch (IOException ioe) {
-      LOG.fatal("Failed to contact the tasktracker", ioe);
+      LOG.error("Failed to contact the tasktracker", ioe);
       System.exit(-1);
     }
   }
@@ -849,13 +849,13 @@ abstract public class Task implements Writable, Configurable {
         } catch (TaskLimitException e) {
           String errMsg = "Task exceeded the limits: " +
                   StringUtils.stringifyException(e);
-          LOG.fatal(errMsg);
+          LOG.error(errMsg);
           try {
             umbilical.fatalError(taskId, errMsg);
           } catch (IOException ioe) {
-            LOG.fatal("Failed to update failure diagnosis", ioe);
+            LOG.error("Failed to update failure diagnosis", ioe);
           }
-          LOG.fatal("Killing " + taskId);
+          LOG.error("Killing " + taskId);
           resetDoneFlag();
           ExitUtil.terminate(69);
         } catch (Throwable t) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
index 059b0ad..54e1e67 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
@@ -35,8 +35,6 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -56,6 +54,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.log4j.Appender;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Charsets;
 
@@ -66,8 +65,8 @@ import com.google.common.base.Charsets;
  */
 @InterfaceAudience.Private
 public class TaskLog {
-  private static final Log LOG =
-    LogFactory.getLog(TaskLog.class);
+  private static final org.slf4j.Logger LOG =
+      LoggerFactory.getLogger(TaskLog.class);
 
   static final String USERLOGS_DIR_NAME = "userlogs";
 
@@ -156,7 +155,7 @@ public class TaskLog {
       fis.close();
       fis = null;
     } finally {
-      IOUtils.cleanup(LOG, fis);
+      IOUtils.cleanupWithLogger(LOG, fis);
     }
     return l;
   }
@@ -231,7 +230,7 @@ public class TaskLog {
       bos.close();
       bos = null;
     } finally {
-      IOUtils.cleanup(LOG, dos, bos);
+      IOUtils.cleanupWithLogger(LOG, dos, bos);
     }
 
     File indexFile = getIndexFile(currentTaskid, isCleanup);
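
TaskLog keeps its org.apache.log4j.Logger import for appender handling, so
only LoggerFactory is imported from slf4j and the LOG field is declared with
the fully qualified org.slf4j.Logger to avoid the name clash. A compact sketch
of the same resolution (class name and body are illustrative):

    import org.apache.log4j.LogManager;
    import org.apache.log4j.Logger;        // the log4j Logger keeps the short name
    import org.slf4j.LoggerFactory;

    class AppenderAwareLog {
      // fully qualified so it does not collide with org.apache.log4j.Logger
      private static final org.slf4j.Logger LOG =
          LoggerFactory.getLogger(AppenderAwareLog.class);

      void report() {
        Logger root = LogManager.getRootLogger();  // log4j root logger
        LOG.info("log4j root logger level: {}", root.getLevel());
      }
    }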

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java
index bd12cd3..7496d7c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskStatus.java
@@ -22,8 +22,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
@@ -31,6 +29,8 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 /**************************************************
  * Describes the current status of a task.  This is
  * not intended to be a comprehensive piece of data.
@@ -39,8 +39,8 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public abstract class TaskStatus implements Writable, Cloneable {
-  static final Log LOG =
-    LogFactory.getLog(TaskStatus.class.getName());
+  static final Logger LOG =
+      LoggerFactory.getLogger(TaskStatus.class.getName());
   
   //enumeration for reporting current phase of a task.
   @InterfaceAudience.Private

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/jobcontrol/Job.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/jobcontrol/Job.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/jobcontrol/Job.java
index e79ab87..fd07837 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/jobcontrol/Job.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/jobcontrol/Job.java
@@ -23,19 +23,19 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Job extends ControlledJob {
-  static final Log LOG = LogFactory.getLog(Job.class);
+  static final Logger LOG = LoggerFactory.getLogger(Job.class);
 
   final public static int SUCCESS = 0;
   final public static int WAITING = 1;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
index 7a48785..0105559 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
@@ -35,6 +33,8 @@ import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.fieldsel.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class implements a mapper/reducer class that can be used to perform
@@ -92,7 +92,8 @@ public class FieldSelectionMapReduce<K, V>
   private int allReduceValueFieldsFrom = -1;
 
 
-  public static final Log LOG = LogFactory.getLog("FieldSelectionMapReduce");
+  public static final Logger LOG =
+      LoggerFactory.getLogger("FieldSelectionMapReduce");
 
   private String specToString() {
     StringBuffer sb = new StringBuffer();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/InputSampler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/InputSampler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/InputSampler.java
index d378bca..affd7ed 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/InputSampler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/InputSampler.java
@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapred.InputFormat;
@@ -32,13 +30,15 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapreduce.Job;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class InputSampler<K,V> extends 
   org.apache.hadoop.mapreduce.lib.partition.InputSampler<K, V> {
 
-  private static final Log LOG = LogFactory.getLog(InputSampler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(InputSampler.class);
 
   public InputSampler(JobConf conf) {
     super(conf);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java
index 05339bc..0af8046 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java
@@ -29,9 +29,9 @@ import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.concurrent.HadoopThreadPoolExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.concurrent.*;
@@ -57,8 +57,8 @@ import java.util.concurrent.*;
 public class MultithreadedMapRunner<K1, V1, K2, V2>
     implements MapRunnable<K1, V1, K2, V2> {
 
-  private static final Log LOG =
-    LogFactory.getLog(MultithreadedMapRunner.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MultithreadedMapRunner.class.getName());
 
   private JobConf job;
   private Mapper<K1, V1, K2, V2> mapper;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Application.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Application.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Application.java
index 733ead4..83d2509 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Application.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Application.java
@@ -30,8 +30,6 @@ import java.util.Random;
 
 import javax.crypto.SecretKey;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -56,6 +54,8 @@ import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is responsible for launching and communicating with the child 
@@ -63,7 +63,8 @@ import org.apache.hadoop.util.StringUtils;
  */
 class Application<K1 extends WritableComparable, V1 extends Writable,
                   K2 extends WritableComparable, V2 extends Writable> {
-  private static final Log LOG = LogFactory.getLog(Application.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Application.class.getName());
   private ServerSocket serverSocket;
   private Process process;
   private Socket clientSocket;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/BinaryProtocol.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
index d196723..89c594a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
@@ -32,8 +32,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;
@@ -44,6 +42,8 @@ import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This protocol is a binary implementation of the Pipes protocol.
@@ -60,8 +60,8 @@ class BinaryProtocol<K1 extends WritableComparable, V1 extends Writable,
 
   private DataOutputStream stream;
   private DataOutputBuffer buffer = new DataOutputBuffer();
-  private static final Log LOG = 
-    LogFactory.getLog(BinaryProtocol.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BinaryProtocol.class.getName());
   private UplinkReaderThread uplink;
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/PipesReducer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
index 3fbe135..cfabdce 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.mapred.pipes;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
@@ -28,6 +26,8 @@ import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -38,7 +38,8 @@ import java.util.Iterator;
 class PipesReducer<K2 extends WritableComparable, V2 extends Writable,
     K3 extends WritableComparable, V3 extends Writable>
     implements Reducer<K2, V2, K3, V3> {
-  private static final Log LOG= LogFactory.getLog(PipesReducer.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(PipesReducer.class.getName());
   private JobConf job;
   private Application<K2, V2, K3, V3> application = null;
   private DownwardProtocol<K2, V2> downlink = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
index ae45782..64448c1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
@@ -34,8 +34,6 @@ import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.Parser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -61,6 +59,8 @@ import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.util.ExitUtil;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The main entry point and job submitter. It may either be used as a command
@@ -70,7 +70,7 @@ import org.apache.hadoop.util.Tool;
 @InterfaceStability.Stable
 public class Submitter extends Configured implements Tool {
 
-  protected static final Log LOG = LogFactory.getLog(Submitter.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(Submitter.class);
   public static final String PRESERVE_COMMANDFILE = 
     "mapreduce.pipes.commandfile.preserve";
   public static final String EXECUTABLE = "mapreduce.pipes.executable";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
index 4245daf..bed423a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
@@ -28,8 +28,6 @@ import java.util.ServiceConfigurationError;
 import java.util.ServiceLoader;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -45,6 +43,8 @@ import org.apache.hadoop.mapreduce.v2.LogParams;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Provides a way to access information about the map/reduce cluster.
@@ -64,7 +64,8 @@ public class Cluster {
   private Path sysDir = null;
   private Path stagingAreaDir = null;
   private Path jobHistoryDir = null;
-  private static final Log LOG = LogFactory.getLog(Cluster.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Cluster.class);
 
   @VisibleForTesting
   static Iterable<ClientProtocolProvider> frameworkLoader =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
index 00119cd..73ce3a8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
@@ -22,8 +22,6 @@ import java.io.InputStream;
 import java.nio.ByteBuffer;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -37,6 +35,8 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.LimitInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class provides utilities to make it easier to work with Cryptographic
@@ -47,7 +47,7 @@ import org.apache.hadoop.util.LimitInputStream;
 @InterfaceStability.Unstable
 public class CryptoUtils {
 
-  private static final Log LOG = LogFactory.getLog(CryptoUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CryptoUtils.class);
 
   public static boolean isEncryptedSpillEnabled(Configuration conf) {
     return conf.getBoolean(MRJobConfig.MR_ENCRYPTED_INTERMEDIATE_DATA,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
index a09f034..f164b62 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
@@ -26,8 +26,6 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -44,6 +42,8 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ReservationId;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The job submitter's view of the Job.
@@ -81,7 +81,7 @@ import org.apache.hadoop.yarn.api.records.ReservationId;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class Job extends JobContextImpl implements JobContext, AutoCloseable {
-  private static final Log LOG = LogFactory.getLog(Job.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Job.class);
 
   @InterfaceStability.Evolving
   public enum JobState {DEFINE, RUNNING};

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
index a044fc1..03b29bd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
@@ -27,8 +27,6 @@ import java.util.LinkedList;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -45,6 +43,8 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.URL;
 import org.apache.hadoop.yarn.client.api.SharedCacheClient;
 import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -55,7 +55,8 @@ import com.google.common.annotations.VisibleForTesting;
 @Private
 @Unstable
 class JobResourceUploader {
-  protected static final Log LOG = LogFactory.getLog(JobResourceUploader.class);
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(JobResourceUploader.class);
   private final boolean useWildcard;
   private final FileSystem jtFs;
   private SharedCacheClient scClient = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java
index ae914c3..303aa13 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A utility to manage job submission files.
@@ -39,7 +39,8 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.Private
 public class JobSubmissionFiles {
 
-  private final static Log LOG = LogFactory.getLog(JobSubmissionFiles.class);
+  private final static Logger LOG =
+      LoggerFactory.getLogger(JobSubmissionFiles.class);
 
   // job submission directory is private!
   final public static FsPermission JOB_DIR_PERMISSION =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
index e5ff26d..246986f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
@@ -36,8 +36,6 @@ import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -49,6 +47,8 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.QueueACL;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.mapred.QueueManager.toFullPropertyName;
 
@@ -69,7 +69,8 @@ import com.google.common.base.Charsets;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 class JobSubmitter {
-  protected static final Log LOG = LogFactory.getLog(JobSubmitter.class);
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(JobSubmitter.class);
   private static final ObjectReader READER =
       new ObjectMapper().readerFor(Map.class);
   private static final String SHUFFLE_KEYGEN_ALGORITHM = "HmacSHA1";
@@ -298,9 +299,7 @@ class JobSubmitter {
   private void printTokens(JobID jobId,
       Credentials credentials) throws IOException {
     LOG.info("Submitting tokens for job: " + jobId);
-    for (Token<?> token: credentials.getAllTokens()) {
-      LOG.info(token);
-    }
+    LOG.info("Executing with tokens: {}", credentials.getAllTokens());
   }
 
   @SuppressWarnings("unchecked")

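Beyond the mechanical import and field changes, the JobSubmitter hunk above also rewrites printTokens: SLF4J's Logger has no info(Object) overload, so the old per-token LOG.info(token) loop is collapsed into a single parameterized message. A hedged sketch of the idiom with simplified signatures (TokenLoggingSketch is hypothetical, not part of this patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class TokenLoggingSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(TokenLoggingSketch.class);

      void printTokens(Object jobId, Iterable<?> tokens) {
        LOG.info("Submitting tokens for job: " + jobId);
        // One parameterized call replaces the loop; the {} argument is only
        // turned into a string when INFO is enabled.
        LOG.info("Executing with tokens: {}", tokens);
      }
    }
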
http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
index 4ab7e89..a1c0f68 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
@@ -29,8 +29,6 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.concurrent.ConcurrentSkipListMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.StringInterner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
@@ -59,7 +59,8 @@ public abstract class AbstractCounters<C extends Counter,
                                        G extends CounterGroupBase<C>>
     implements Writable, Iterable<G> {
 
-  protected static final Log LOG = LogFactory.getLog("mapreduce.Counters");
+  protected static final Logger LOG =
+      LoggerFactory.getLogger("mapreduce.Counters");
 
   /**
    * A cache from enum values to the associated counter.

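AbstractCounters (like FieldSelectionMapReduce earlier) uses a string-named logger rather than a class, so the logger keeps its existing "mapreduce.Counters" name after the migration. Illustrative only; NamedLoggerSketch is a hypothetical holder class:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class NamedLoggerSketch {
      // The category name stays "mapreduce.Counters", independent of the logging backend.
      static final Logger COUNTERS_LOG = LoggerFactory.getLogger("mapreduce.Counters");
    }
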
http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
index e0e5b79..046368e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
@@ -33,8 +33,6 @@ import com.google.common.collect.AbstractIterator;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Maps;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.io.WritableUtils;
@@ -42,6 +40,8 @@ import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.util.ResourceBundles;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An abstract class to provide common implementation of the filesystem
@@ -56,7 +56,8 @@ public abstract class FileSystemCounterGroup<C extends Counter>
   static final int MAX_NUM_SCHEMES = 100; // intern/sanity check
   static final ConcurrentMap<String, String> schemes = Maps.newConcurrentMap();
   
-  private static final Log LOG = LogFactory.getLog(FileSystemCounterGroup.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FileSystemCounterGroup.class);
 
   // C[] would need Array.newInstance which requires a Class<C> reference.
   // Just a few local casts probably worth not having to carry it around.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/178751ed/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
index b51f528..5324223 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
@@ -26,13 +26,13 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.util.ResourceBundles;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.AbstractIterator;
 import com.google.common.collect.Iterators;
@@ -47,7 +47,8 @@ import com.google.common.collect.Iterators;
 @InterfaceAudience.Private
 public abstract class FrameworkCounterGroup<T extends Enum<T>,
     C extends Counter> implements CounterGroupBase<C> {
-  private static final Log LOG = LogFactory.getLog(FrameworkCounterGroup.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FrameworkCounterGroup.class);
   
   private final Class<T> enumClass; // for Enum.valueOf
   private final Object[] counters;  // local casts are OK and save a class ref

