This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new a252cbd5ca13 [SPARK-48291][CORE] Rename Java Logger as SparkLogger
a252cbd5ca13 is described below

commit a252cbd5ca13fb7b758c839edc92b50336747d82
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Wed May 15 16:43:45 2024 -0700

    [SPARK-48291][CORE] Rename Java Logger as SparkLogger
    
    ### What changes were proposed in this pull request?
    
    Two new classes `org.apache.spark.internal.Logger` and 
`org.apache.spark.internal.LoggerFactory` were introduced from 
https://github.com/apache/spark/pull/46301.
    Given that Logger is a widely recognized **interface** in Log4j, it may 
lead to confusion to have a class with the same name. To avoid this and clarify 
its purpose within the Spark framework, I propose renaming 
`org.apache.spark.internal.Logger` to `org.apache.spark.internal.SparkLogger`. 
Similarly, to maintain consistency, `org.apache.spark.internal.LoggerFactory` 
should be renamed to `org.apache.spark.internal.SparkLoggerFactory`.
    
    ### Why are the changes needed?
    
    To avoid naming confusion and to clarify the purpose of the Java Spark 
logger within the logging framework.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    GA tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #46600 from gengliangwang/refactorLogger.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../java/org/apache/spark/network/TransportContext.java     |  6 +++---
 .../org/apache/spark/network/client/TransportClient.java    |  6 +++---
 .../apache/spark/network/client/TransportClientFactory.java |  7 ++++---
 .../spark/network/client/TransportResponseHandler.java      |  7 ++++---
 .../apache/spark/network/crypto/AuthClientBootstrap.java    |  6 +++---
 .../org/apache/spark/network/crypto/AuthRpcHandler.java     |  6 +++---
 .../org/apache/spark/network/protocol/MessageDecoder.java   |  6 +++---
 .../org/apache/spark/network/protocol/MessageEncoder.java   |  6 +++---
 .../apache/spark/network/protocol/SslMessageEncoder.java    |  6 +++---
 .../org/apache/spark/network/sasl/SaslClientBootstrap.java  |  6 +++---
 .../java/org/apache/spark/network/sasl/SaslRpcHandler.java  |  6 +++---
 .../java/org/apache/spark/network/sasl/SparkSaslClient.java |  6 +++---
 .../java/org/apache/spark/network/sasl/SparkSaslServer.java |  6 +++---
 .../spark/network/server/ChunkFetchRequestHandler.java      |  7 ++++---
 .../apache/spark/network/server/OneForOneStreamManager.java |  7 ++++---
 .../java/org/apache/spark/network/server/RpcHandler.java    |  6 +++---
 .../spark/network/server/TransportChannelHandler.java       |  7 ++++---
 .../spark/network/server/TransportRequestHandler.java       |  7 ++++---
 .../org/apache/spark/network/server/TransportServer.java    |  6 +++---
 .../apache/spark/network/ssl/ReloadingX509TrustManager.java |  7 ++++---
 .../main/java/org/apache/spark/network/ssl/SSLFactory.java  |  6 +++---
 .../main/java/org/apache/spark/network/util/DBProvider.java |  6 +++---
 .../java/org/apache/spark/network/util/LevelDBProvider.java |  8 ++++----
 .../java/org/apache/spark/network/util/NettyLogger.java     |  6 +++---
 .../java/org/apache/spark/network/util/RocksDBProvider.java |  8 ++++----
 .../org/apache/spark/network/sasl/ShuffleSecretManager.java |  7 ++++---
 .../org/apache/spark/network/shuffle/BlockStoreClient.java  |  6 +++---
 .../apache/spark/network/shuffle/ExternalBlockHandler.java  |  7 ++++---
 .../spark/network/shuffle/ExternalShuffleBlockResolver.java |  7 ++++---
 .../apache/spark/network/shuffle/OneForOneBlockFetcher.java |  7 ++++---
 .../apache/spark/network/shuffle/OneForOneBlockPusher.java  |  7 ++++---
 .../spark/network/shuffle/RemoteBlockPushResolver.java      |  7 ++++---
 .../spark/network/shuffle/RetryingBlockTransferor.java      |  7 ++++---
 .../spark/network/shuffle/ShuffleTransportContext.java      |  9 +++++----
 .../network/shuffle/checksum/ShuffleChecksumHelper.java     |  8 ++++----
 .../org/apache/spark/network/yarn/YarnShuffleService.java   | 13 +++++++------
 .../apache/spark/internal/{Logger.java => SparkLogger.java} |  4 ++--
 .../{LoggerFactory.java => SparkLoggerFactory.java}         | 10 +++++-----
 .../main/java/org/apache/spark/network/util/JavaUtils.java  |  6 +++---
 .../test/java/org/apache/spark/util/LoggerSuiteBase.java    |  4 ++--
 .../test/java/org/apache/spark/util/PatternLoggerSuite.java |  8 ++++----
 .../java/org/apache/spark/util/StructuredLoggerSuite.java   |  9 +++++----
 .../java/com/codahale/metrics/ganglia/GangliaReporter.java  |  6 +++---
 .../main/java/org/apache/spark/io/ReadAheadInputStream.java |  7 ++++---
 .../java/org/apache/spark/memory/TaskMemoryManager.java     |  6 +++---
 .../spark/shuffle/sort/BypassMergeSortShuffleWriter.java    |  7 ++++---
 .../apache/spark/shuffle/sort/ShuffleExternalSorter.java    |  7 ++++---
 .../org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java  |  6 +++---
 .../shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java    |  8 ++++----
 .../java/org/apache/spark/unsafe/map/BytesToBytesMap.java   |  6 +++---
 .../util/collection/unsafe/sort/UnsafeExternalSorter.java   |  7 ++++---
 .../collection/unsafe/sort/UnsafeSorterSpillReader.java     |  7 ++++---
 .../sql/catalyst/expressions/RowBasedKeyValueBatch.java     |  7 ++++---
 .../org/apache/spark/sql/util/CaseInsensitiveStringMap.java |  7 ++++---
 .../main/java/org/apache/hive/service/AbstractService.java  |  6 +++---
 .../main/java/org/apache/hive/service/CompositeService.java |  6 +++---
 .../src/main/java/org/apache/hive/service/CookieSigner.java |  6 +++---
 .../java/org/apache/hive/service/ServiceOperations.java     |  6 +++---
 .../src/main/java/org/apache/hive/service/ServiceUtils.java |  4 ++--
 .../java/org/apache/hive/service/auth/HiveAuthFactory.java  |  6 +++---
 .../java/org/apache/hive/service/auth/HttpAuthUtils.java    |  6 +++---
 .../apache/hive/service/auth/TSetIpAddressProcessor.java    |  6 +++---
 .../main/java/org/apache/hive/service/cli/CLIService.java   |  6 +++---
 .../java/org/apache/hive/service/cli/ColumnBasedSet.java    |  6 +++---
 .../hive/service/cli/operation/ClassicTableTypeMapping.java |  6 +++---
 .../org/apache/hive/service/cli/operation/Operation.java    |  6 +++---
 .../apache/hive/service/cli/operation/OperationManager.java |  6 +++---
 .../apache/hive/service/cli/session/HiveSessionImpl.java    |  6 +++---
 .../org/apache/hive/service/cli/session/SessionManager.java |  6 +++---
 .../apache/hive/service/cli/thrift/ThriftCLIService.java    |  6 +++---
 .../apache/hive/service/cli/thrift/ThriftHttpServlet.java   |  6 +++---
 .../java/org/apache/hive/service/server/HiveServer2.java    |  6 +++---
 .../hive/service/server/ThreadWithGarbageCleanup.java       |  6 +++---
 .../spark/sql/hive/thriftserver/SparkSQLCLIService.scala    |  6 +++---
 74 files changed, 255 insertions(+), 231 deletions(-)

diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
 
b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
index 815f4dc6e6cd..e8ce6840e3fc 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
@@ -35,8 +35,8 @@ import io.netty.handler.stream.ChunkedWriteHandler;
 import io.netty.handler.timeout.IdleStateHandler;
 import io.netty.handler.codec.MessageToMessageEncoder;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.client.TransportClient;
 import org.apache.spark.network.client.TransportClientBootstrap;
 import org.apache.spark.network.client.TransportClientFactory;
@@ -73,7 +73,7 @@ import org.apache.spark.network.util.TransportFrameDecoder;
  * processes to send messages back to the client on an existing channel.
  */
 public class TransportContext implements Closeable {
-  private static final Logger logger = 
LoggerFactory.getLogger(TransportContext.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(TransportContext.class);
 
   private static final NettyLogger nettyLogger = new NettyLogger();
   private final TransportConf conf;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java
 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java
index 77ef6f09c1b5..4c144a73a929 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java
@@ -36,8 +36,8 @@ import io.netty.util.concurrent.GenericFutureListener;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.ManagedBuffer;
@@ -73,7 +73,7 @@ import static 
org.apache.spark.network.util.NettyUtils.getRemoteAddress;
  * Concurrency: thread safe and can be called from multiple threads.
  */
 public class TransportClient implements Closeable {
-  private static final Logger logger = 
LoggerFactory.getLogger(TransportClient.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(TransportClient.class);
 
   private final Channel channel;
   private final TransportResponseHandler handler;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
index f2dbfd92b854..e1f19f956cc0 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
@@ -43,8 +43,8 @@ import io.netty.handler.ssl.SslHandler;
 import io.netty.util.concurrent.Future;
 import io.netty.util.concurrent.GenericFutureListener;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.TransportContext;
@@ -79,7 +79,8 @@ public class TransportClientFactory implements Closeable {
     }
   }
 
-  private static final Logger logger = 
LoggerFactory.getLogger(TransportClientFactory.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(TransportClientFactory.class);
 
   private final TransportContext context;
   private final TransportConf conf;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java
index 24ae570044e2..be4cf4a58abe 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java
@@ -29,8 +29,8 @@ import io.netty.channel.Channel;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.protocol.ChunkFetchFailure;
@@ -53,7 +53,8 @@ import org.apache.spark.network.util.TransportFrameDecoder;
  * Concurrency: thread safe and can be called from multiple threads.
  */
 public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
-  private static final Logger logger = 
LoggerFactory.getLogger(TransportResponseHandler.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(TransportResponseHandler.class);
 
   private final Channel channel;
 
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java
 
b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java
index 0bfede415ba5..08e2c084fe67 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java
@@ -27,8 +27,8 @@ import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.Channel;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.client.TransportClient;
 import org.apache.spark.network.client.TransportClientBootstrap;
 import org.apache.spark.network.sasl.SaslClientBootstrap;
@@ -47,7 +47,7 @@ import org.apache.spark.network.util.TransportConf;
  */
 public class AuthClientBootstrap implements TransportClientBootstrap {
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(AuthClientBootstrap.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(AuthClientBootstrap.class);
 
   private final TransportConf conf;
   private final String appId;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java
index 778cb9a120e1..65367743e24f 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthRpcHandler.java
@@ -26,8 +26,8 @@ import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.Channel;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.client.RpcResponseCallback;
@@ -48,7 +48,7 @@ import org.apache.spark.network.util.TransportConf;
  * authenticated. A connection may be authenticated at most once.
  */
 class AuthRpcHandler extends AbstractAuthRpcHandler {
-  private static final Logger LOG = 
LoggerFactory.getLogger(AuthRpcHandler.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(AuthRpcHandler.class);
 
   /** Transport configuration. */
   private final TransportConf conf;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java
index 4dbd968788d4..a9b700a7800e 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageDecoder.java
@@ -24,8 +24,8 @@ import io.netty.channel.ChannelHandler;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToMessageDecoder;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 /**
  * Decoder used by the client side to encode server-to-client responses.
@@ -34,7 +34,7 @@ import org.apache.spark.internal.LoggerFactory;
 @ChannelHandler.Sharable
 public final class MessageDecoder extends MessageToMessageDecoder<ByteBuf> {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(MessageDecoder.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(MessageDecoder.class);
 
   public static final MessageDecoder INSTANCE = new MessageDecoder();
 
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
index 081329c74aa2..ab20fb908eb4 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
@@ -25,8 +25,8 @@ import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToMessageEncoder;
 
 import org.apache.spark.internal.LogKeys;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.MDC;
 
 /**
@@ -36,7 +36,7 @@ import org.apache.spark.internal.MDC;
 @ChannelHandler.Sharable
 public final class MessageEncoder extends MessageToMessageEncoder<Message> {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(MessageEncoder.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(MessageEncoder.class);
 
   public static final MessageEncoder INSTANCE = new MessageEncoder();
 
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java
index 94e4c1011cce..abe6ccca7bfd 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java
@@ -26,8 +26,8 @@ import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToMessageEncoder;
 import io.netty.handler.stream.ChunkedStream;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -38,7 +38,7 @@ import org.apache.spark.internal.MDC;
 @ChannelHandler.Sharable
 public final class SslMessageEncoder extends MessageToMessageEncoder<Message> {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(SslMessageEncoder.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(SslMessageEncoder.class);
 
   private SslMessageEncoder() {}
 
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java
index fca46f6120e6..0a355d28c366 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java
@@ -27,8 +27,8 @@ import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.Channel;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.client.TransportClient;
 import org.apache.spark.network.client.TransportClientBootstrap;
 import org.apache.spark.network.util.JavaUtils;
@@ -39,7 +39,7 @@ import org.apache.spark.network.util.TransportConf;
  * server should be setup with a {@link SaslRpcHandler} with matching keys for 
the given appId.
  */
 public class SaslClientBootstrap implements TransportClientBootstrap {
-  private static final Logger logger = 
LoggerFactory.getLogger(SaslClientBootstrap.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(SaslClientBootstrap.class);
 
   private final TransportConf conf;
   private final String appId;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java
index 61a599fc6b9e..b5fffe583ec6 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslRpcHandler.java
@@ -25,8 +25,8 @@ import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.Channel;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.client.RpcResponseCallback;
 import org.apache.spark.network.client.TransportClient;
 import org.apache.spark.network.server.AbstractAuthRpcHandler;
@@ -43,7 +43,7 @@ import org.apache.spark.network.util.TransportConf;
  * which are individual RPCs.
  */
 public class SaslRpcHandler extends AbstractAuthRpcHandler {
-  private static final Logger logger = 
LoggerFactory.getLogger(SaslRpcHandler.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(SaslRpcHandler.class);
 
   /** Transport configuration. */
   private final TransportConf conf;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
index cf391b7049b6..3600c1045dbf 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
@@ -32,8 +32,8 @@ import javax.security.sasl.SaslException;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 import static org.apache.spark.network.sasl.SparkSaslServer.*;
 
@@ -43,7 +43,7 @@ import static org.apache.spark.network.sasl.SparkSaslServer.*;
  * firstToken, which is then followed by a set of challenges and responses.
  */
 public class SparkSaslClient implements SaslEncryptionBackend {
-  private static final Logger logger = 
LoggerFactory.getLogger(SparkSaslClient.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(SparkSaslClient.class);
 
   private final String secretKeyId;
   private final SecretKeyHolder secretKeyHolder;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
index 0008b9ad284a..b897650afe83 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
@@ -37,8 +37,8 @@ import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.handler.codec.base64.Base64;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 /**
  * A SASL Server for Spark which simply keeps track of the state of a single 
SASL session, from the
@@ -46,7 +46,7 @@ import org.apache.spark.internal.LoggerFactory;
  * connections on some socket.)
  */
 public class SparkSaslServer implements SaslEncryptionBackend {
-  private static final Logger logger = 
LoggerFactory.getLogger(SparkSaslServer.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(SparkSaslServer.class);
 
   /**
    * This is passed as the server name when creating the sasl client/server.
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java
index dc3b559d3602..cc0bed7ed5b6 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/ChunkFetchRequestHandler.java
@@ -27,8 +27,8 @@ import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.SimpleChannelInboundHandler;
 
 import org.apache.spark.internal.LogKeys;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.client.TransportClient;
@@ -51,7 +51,8 @@ import static org.apache.spark.network.util.NettyUtils.*;
  * registering executors, or waiting for response for an OpenBlocks messages.
  */
 public class ChunkFetchRequestHandler extends 
SimpleChannelInboundHandler<ChunkFetchRequest> {
-  private static final Logger logger = 
LoggerFactory.getLogger(ChunkFetchRequestHandler.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ChunkFetchRequestHandler.class);
 
   private final TransportClient client;
   private final StreamManager streamManager;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
index 316342469287..f322293782de 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
@@ -29,8 +29,8 @@ import io.netty.channel.Channel;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.client.TransportClient;
 
@@ -39,7 +39,8 @@ import org.apache.spark.network.client.TransportClient;
  * individually fetched as chunks by the client. Each registered buffer is one 
chunk.
  */
 public class OneForOneStreamManager extends StreamManager {
-  private static final Logger logger = 
LoggerFactory.getLogger(OneForOneStreamManager.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(OneForOneStreamManager.class);
 
   private final AtomicLong nextStreamId;
   private final ConcurrentHashMap<Long, StreamState> streams;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java
index b91e14e6332a..a7c38917d17f 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java
@@ -19,8 +19,8 @@ package org.apache.spark.network.server;
 
 import java.nio.ByteBuffer;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.client.MergedBlockMetaResponseCallback;
 import org.apache.spark.network.client.RpcResponseCallback;
 import org.apache.spark.network.client.StreamCallbackWithID;
@@ -122,7 +122,7 @@ public abstract class RpcHandler {
 
   private static class OneWayRpcCallback implements RpcResponseCallback {
 
-    private static final Logger logger = 
LoggerFactory.getLogger(OneWayRpcCallback.class);
+    private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(OneWayRpcCallback.class);
 
     @Override
     public void onSuccess(ByteBuffer response) {
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
index ad8b8d71bcc6..283f0f0a431f 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
@@ -23,8 +23,8 @@ import io.netty.handler.timeout.IdleState;
 import io.netty.handler.timeout.IdleStateEvent;
 import org.apache.spark.network.TransportContext;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.client.TransportClient;
@@ -53,7 +53,8 @@ import static 
org.apache.spark.network.util.NettyUtils.getRemoteAddress;
  * timeout if the client is continuously sending but getting no responses, for 
simplicity.
  */
 public class TransportChannelHandler extends 
SimpleChannelInboundHandler<Message> {
-  private static final Logger logger = 
LoggerFactory.getLogger(TransportChannelHandler.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(TransportChannelHandler.class);
 
   private final TransportClient client;
   private final TransportResponseHandler responseHandler;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
index 9c581193d16f..687c3040ed08 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
@@ -25,8 +25,8 @@ import com.google.common.base.Throwables;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelFuture;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.ManagedBuffer;
@@ -46,7 +46,8 @@ import static 
org.apache.spark.network.util.NettyUtils.getRemoteAddress;
  */
 public class TransportRequestHandler extends MessageHandler<RequestMessage> {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(TransportRequestHandler.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(TransportRequestHandler.class);
 
   /** The Netty channel that this handler is associated with. */
   private final Channel channel;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
index 4cbde59ed6e7..d1a19652f564 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
@@ -35,8 +35,8 @@ import io.netty.channel.EventLoopGroup;
 import io.netty.channel.socket.SocketChannel;
 import org.apache.commons.lang3.SystemUtils;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.TransportContext;
 import org.apache.spark.network.util.*;
 
@@ -44,7 +44,7 @@ import org.apache.spark.network.util.*;
  * Server for the efficient, low-level streaming service.
  */
 public class TransportServer implements Closeable {
-  private static final Logger logger = 
LoggerFactory.getLogger(TransportServer.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(TransportServer.class);
 
   private final TransportContext context;
   private final TransportConf conf;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java
 
b/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java
index 52e1c9a1fc6a..09609d0ac8ad 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java
@@ -30,8 +30,8 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import com.google.common.annotations.VisibleForTesting;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 /**
  * A {@link TrustManager} implementation that reloads its configuration when
@@ -45,7 +45,8 @@ import org.apache.spark.internal.LoggerFactory;
 public final class ReloadingX509TrustManager
         implements X509TrustManager, Runnable {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(ReloadingX509TrustManager.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ReloadingX509TrustManager.class);
 
   private final String type;
   private final File file;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java
 
b/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java
index 85589e4accea..a2e42e3eb39f 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java
@@ -49,12 +49,12 @@ import io.netty.handler.ssl.SslContext;
 import io.netty.handler.ssl.SslContextBuilder;
 import io.netty.handler.ssl.SslProvider;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.util.JavaUtils;
 
 public class SSLFactory {
-  private static final Logger logger = 
LoggerFactory.getLogger(SSLFactory.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(SSLFactory.class);
 
   /**
    * For a configuration specifying keystore/truststore files
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java
 
b/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java
index 950a5298fd6d..94a64b3f4037 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/util/DBProvider.java
@@ -22,8 +22,8 @@ import java.io.IOException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.shuffledb.DB;
 import org.apache.spark.network.shuffledb.DBBackend;
 import org.apache.spark.network.shuffledb.LevelDB;
@@ -31,7 +31,7 @@ import org.apache.spark.network.shuffledb.RocksDB;
 import org.apache.spark.network.shuffledb.StoreVersion;
 
 public class DBProvider {
-  private static final Logger logger = 
LoggerFactory.getLogger(DBProvider.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(DBProvider.class);
     public static DB initDB(
         DBBackend dbBackend,
         File dbFile,
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java
 
b/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java
index 184d83c67224..391931961a47 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/util/LevelDBProvider.java
@@ -27,8 +27,8 @@ import org.fusesource.leveldbjni.internal.NativeDB;
 import org.iq80.leveldb.DB;
 import org.iq80.leveldb.Options;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.shuffledb.StoreVersion;
@@ -37,7 +37,7 @@ import org.apache.spark.network.shuffledb.StoreVersion;
  * LevelDB utility class available in the network package.
  */
 public class LevelDBProvider {
-  private static final Logger logger = 
LoggerFactory.getLogger(LevelDBProvider.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(LevelDBProvider.class);
 
   public static DB initLevelDB(File dbFile, StoreVersion version, ObjectMapper 
mapper) throws
       IOException {
@@ -101,7 +101,7 @@ public class LevelDBProvider {
   }
 
   private static class LevelDBLogger implements org.iq80.leveldb.Logger {
-    private static final Logger LOG = 
LoggerFactory.getLogger(LevelDBLogger.class);
+    private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(LevelDBLogger.class);
 
     @Override
     public void log(String message) {
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
 
b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
index 2d6dcdbe0e83..a7063151fae8 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
@@ -26,11 +26,11 @@ import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.logging.LoggingHandler;
 import io.netty.handler.logging.LogLevel;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 public class NettyLogger {
-  private static final Logger logger = 
LoggerFactory.getLogger(NettyLogger.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(NettyLogger.class);
 
   /** A Netty LoggingHandler which does not dump the message contents. */
   private static class NoContentLoggingHandler extends LoggingHandler {
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java
 
b/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java
index 994e21eb439d..1753c124c993 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/util/RocksDBProvider.java
@@ -25,8 +25,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
 import org.rocksdb.*;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.shuffledb.StoreVersion;
@@ -40,7 +40,7 @@ public class RocksDBProvider {
       org.rocksdb.RocksDB.loadLibrary();
     }
 
-    private static final Logger logger = 
LoggerFactory.getLogger(RocksDBProvider.class);
+    private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(RocksDBProvider.class);
 
     public static RocksDB initRockDB(File dbFile, StoreVersion version, 
ObjectMapper mapper) throws
         IOException {
@@ -135,7 +135,7 @@ public class RocksDBProvider {
     }
 
     private static class RocksDBLogger extends org.rocksdb.Logger {
-        private static final Logger LOG = 
LoggerFactory.getLogger(RocksDBLogger.class);
+        private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(RocksDBLogger.class);
 
         RocksDBLogger(Options options) {
           super(options.infoLogLevel());
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
index 6a490cc7897e..d67f2a3099d3 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
@@ -20,8 +20,8 @@ package org.apache.spark.network.sasl;
 import java.nio.ByteBuffer;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -31,7 +31,8 @@ import org.apache.spark.network.util.JavaUtils;
  * A class that manages shuffle secret used by the external shuffle service.
  */
 public class ShuffleSecretManager implements SecretKeyHolder {
-  private static final Logger logger = 
LoggerFactory.getLogger(ShuffleSecretManager.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ShuffleSecretManager.class);
 
   private final ConcurrentHashMap<String, String> shuffleSecretMap;
 
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java
index 695df81f89f3..dcb0a52b0d66 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/BlockStoreClient.java
@@ -27,8 +27,8 @@ import java.util.concurrent.CompletableFuture;
 
 import com.codahale.metrics.MetricSet;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.ManagedBuffer;
@@ -44,7 +44,7 @@ import org.apache.spark.network.util.TransportConf;
  * or external service.
  */
 public abstract class BlockStoreClient implements Closeable {
-  protected final Logger logger = LoggerFactory.getLogger(this.getClass());
+  protected final SparkLogger logger = 
SparkLoggerFactory.getLogger(this.getClass());
 
   protected volatile TransportClientFactory clientFactory;
   protected String appId;
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
index a084fb4cc213..5d33bfb345a9 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
@@ -38,8 +38,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.ManagedBuffer;
@@ -67,7 +67,8 @@ import org.apache.spark.network.util.TransportConf;
  */
 public class ExternalBlockHandler extends RpcHandler
     implements RpcHandler.MergedBlockMetaReqHandler {
-  private static final Logger logger = 
LoggerFactory.getLogger(ExternalBlockHandler.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ExternalBlockHandler.class);
   private static final String SHUFFLE_MERGER_IDENTIFIER = 
"shuffle-push-merger";
   private static final String SHUFFLE_BLOCK_ID = "shuffle";
   private static final String SHUFFLE_CHUNK_ID = "shuffleChunk";
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
index 3e493327c36f..e43eedd8b25e 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
@@ -39,8 +39,8 @@ import com.google.common.cache.LoadingCache;
 import com.google.common.cache.Weigher;
 import com.google.common.collect.Maps;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.FileSegmentManagedBuffer;
@@ -64,7 +64,8 @@ import org.apache.spark.network.util.TransportConf;
  * from Spark's IndexShuffleBlockResolver.
  */
 public class ExternalShuffleBlockResolver {
-  private static final Logger logger = 
LoggerFactory.getLogger(ExternalShuffleBlockResolver.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ExternalShuffleBlockResolver.class);
 
   private static final ObjectMapper mapper = new ObjectMapper();
 
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java
index 7d20ac50b737..c5c6ab313e19 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java
@@ -27,8 +27,8 @@ import java.util.Map;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.client.ChunkReceivedCallback;
 import org.apache.spark.network.client.RpcResponseCallback;
@@ -53,7 +53,8 @@ import org.apache.spark.network.util.TransportConf;
  * {@link org.apache.spark.network.server.OneForOneStreamManager} on the 
server side.
  */
 public class OneForOneBlockFetcher {
-  private static final Logger logger = 
LoggerFactory.getLogger(OneForOneBlockFetcher.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(OneForOneBlockFetcher.class);
   private static final String SHUFFLE_BLOCK_PREFIX = "shuffle_";
   private static final String SHUFFLE_CHUNK_PREFIX = "shuffleChunk_";
   private static final String SHUFFLE_BLOCK_SPLIT = "shuffle";
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java
index bbb8661fb397..d90ca1a88a26 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockPusher.java
@@ -23,8 +23,8 @@ import java.util.Map;
 
 import com.google.common.base.Preconditions;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.buffer.NioManagedBuffer;
 import org.apache.spark.network.client.RpcResponseCallback;
@@ -44,7 +44,8 @@ import 
org.apache.spark.network.shuffle.protocol.PushBlockStream;
  * @since 3.1.0
  */
 public class OneForOneBlockPusher {
-  private static final Logger logger = 
LoggerFactory.getLogger(OneForOneBlockPusher.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(OneForOneBlockPusher.class);
   private static final ErrorHandler PUSH_ERROR_HANDLER = new 
ErrorHandler.BlockPushErrorHandler();
   public static final String SHUFFLE_PUSH_BLOCK_PREFIX = "shufflePush";
 
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
index cf1e1cdb42b0..02a38eac5b40 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
@@ -65,8 +65,8 @@ import com.google.common.primitives.Longs;
 
 import org.roaringbitmap.RoaringBitmap;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.FileSegmentManagedBuffer;
@@ -98,7 +98,8 @@ import org.apache.spark.network.util.TransportConf;
 public class RemoteBlockPushResolver implements MergedShuffleFileManager {
 
   private static final Cleaner CLEANER = Cleaner.create();
-  private static final Logger logger = 
LoggerFactory.getLogger(RemoteBlockPushResolver.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(RemoteBlockPushResolver.class);
 
   public static final String MERGED_SHUFFLE_FILE_NAME_PREFIX = "shuffleMerged";
   public static final String SHUFFLE_META_DELIMITER = ":";
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java
index a5c26c6185bf..ca2073af87c1 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockTransferor.java
@@ -29,8 +29,8 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.Uninterruptibles;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.buffer.ManagedBuffer;
@@ -70,7 +70,8 @@ public class RetryingBlockTransferor {
   private static final ExecutorService executorService = 
Executors.newCachedThreadPool(
     NettyUtils.createThreadFactory("Block Transfer Retry"));
 
-  private static final Logger logger = 
LoggerFactory.getLogger(RetryingBlockTransferor.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(RetryingBlockTransferor.class);
 
   /** Used to initiate new Block transfer on our remaining blocks. */
   private final BlockTransferStarter transferStarter;
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
index 96651189aede..705d47aab3b5 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
@@ -29,8 +29,8 @@ import io.netty.channel.SimpleChannelInboundHandler;
 import io.netty.channel.socket.SocketChannel;
 import io.netty.handler.codec.MessageToMessageDecoder;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.network.TransportContext;
 import org.apache.spark.network.protocol.Message;
 import org.apache.spark.network.protocol.MessageDecoder;
@@ -51,7 +51,8 @@ import static 
org.apache.spark.network.util.NettyUtils.getRemoteAddress;
  * are processed in the separate handlers.
  * */
 public class ShuffleTransportContext extends TransportContext {
-  private static final Logger logger = 
LoggerFactory.getLogger(ShuffleTransportContext.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ShuffleTransportContext.class);
   private static final ShuffleMessageDecoder SHUFFLE_DECODER =
       new ShuffleMessageDecoder(MessageDecoder.INSTANCE);
   private final EventLoopGroup finalizeWorkers;
@@ -157,7 +158,7 @@ public class ShuffleTransportContext extends 
TransportContext {
   }
 
   static class FinalizedHandler extends 
SimpleChannelInboundHandler<RpcRequestInternal> {
-    private static final Logger logger = 
LoggerFactory.getLogger(FinalizedHandler.class);
+    private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(FinalizedHandler.class);
     public static final String HANDLER_NAME = "finalizeHandler";
     private final TransportRequestHandler transportRequestHandler;
 
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java
index 2665801a1d03..f9c0c60c2f2c 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/checksum/ShuffleChecksumHelper.java
@@ -26,8 +26,8 @@ import java.util.zip.Checksum;
 
 import com.google.common.io.ByteStreams;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.annotation.Private;
@@ -38,8 +38,8 @@ import org.apache.spark.network.buffer.ManagedBuffer;
  */
 @Private
 public class ShuffleChecksumHelper {
-  private static final Logger logger =
-    LoggerFactory.getLogger(ShuffleChecksumHelper.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ShuffleChecksumHelper.class);
 
   public static final int CHECKSUM_CALCULATION_BUFFER = 8192;
   public static final Checksum[] EMPTY_CHECKSUM = new Checksum[0];
diff --git 
a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
 
b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
index 66a6429ba14d..e0af3c5ae246 100644
--- 
a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
+++ 
b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
@@ -54,8 +54,8 @@ import org.apache.spark.network.shuffledb.StoreVersion;
 import org.apache.spark.network.util.DBProvider;
 
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.network.TransportContext;
@@ -102,8 +102,9 @@ import 
org.apache.spark.network.yarn.util.HadoopConfigProvider;
  * This {@code classpath} configuration is only supported on YARN versions 
&gt;= 2.9.0.
  */
 public class YarnShuffleService extends AuxiliaryService {
-  private static final Logger defaultLogger = 
LoggerFactory.getLogger(YarnShuffleService.class);
-  private Logger logger = defaultLogger;
+  private static final SparkLogger defaultSparkLogger =
+    SparkLoggerFactory.getLogger(YarnShuffleService.class);
+  private SparkLogger logger = defaultSparkLogger;
 
   // Port on which the shuffle server listens for fetch requests
   private static final String SPARK_SHUFFLE_SERVICE_PORT_KEY = 
"spark.shuffle.service.port";
@@ -246,7 +247,7 @@ public class YarnShuffleService extends AuxiliaryService {
     String logsNamespace = _conf.get(SPARK_SHUFFLE_SERVICE_LOGS_NAMESPACE_KEY, 
"");
     if (!logsNamespace.isEmpty()) {
       String className = YarnShuffleService.class.getName();
-      logger = LoggerFactory.getLogger(className + "." + logsNamespace);
+      logger = SparkLoggerFactory.getLogger(className + "." + logsNamespace);
     }
 
     super.serviceInit(_conf);
@@ -367,7 +368,7 @@ public class YarnShuffleService extends AuxiliaryService {
       return mergeManagerSubClazz.getConstructor(TransportConf.class, 
File.class)
         .newInstance(conf, mergeManagerFile);
     } catch (Exception e) {
-      defaultLogger.error("Unable to create an instance of {}",
+      defaultSparkLogger.error("Unable to create an instance of {}",
         MDC.of(LogKeys.CLASS_NAME$.MODULE$, mergeManagerImplClassName));
       return new NoOpMergedShuffleFileManager(conf, mergeManagerFile);
     }
diff --git a/common/utils/src/main/java/org/apache/spark/internal/Logger.java 
b/common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java
similarity index 98%
rename from common/utils/src/main/java/org/apache/spark/internal/Logger.java
rename to common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java
index 7c54e912b189..20ad68641da0 100644
--- a/common/utils/src/main/java/org/apache/spark/internal/Logger.java
+++ b/common/utils/src/main/java/org/apache/spark/internal/SparkLogger.java
@@ -25,12 +25,12 @@ import org.apache.logging.log4j.CloseableThreadContext;
 import org.apache.logging.log4j.message.MessageFactory;
 import org.apache.logging.log4j.message.ParameterizedMessageFactory;
 
-public class Logger {
+public class SparkLogger {
 
   private static final MessageFactory MESSAGE_FACTORY = 
ParameterizedMessageFactory.INSTANCE;
   private final org.slf4j.Logger slf4jLogger;
 
-  Logger(org.slf4j.Logger slf4jLogger) {
+  SparkLogger(org.slf4j.Logger slf4jLogger) {
     this.slf4jLogger = slf4jLogger;
   }
 
diff --git 
a/common/utils/src/main/java/org/apache/spark/internal/LoggerFactory.java 
b/common/utils/src/main/java/org/apache/spark/internal/SparkLoggerFactory.java
similarity index 81%
rename from 
common/utils/src/main/java/org/apache/spark/internal/LoggerFactory.java
rename to 
common/utils/src/main/java/org/apache/spark/internal/SparkLoggerFactory.java
index 4595c9ad4b01..699f43d77283 100644
--- a/common/utils/src/main/java/org/apache/spark/internal/LoggerFactory.java
+++ 
b/common/utils/src/main/java/org/apache/spark/internal/SparkLoggerFactory.java
@@ -17,15 +17,15 @@
 
 package org.apache.spark.internal;
 
-public class LoggerFactory {
+public class SparkLoggerFactory {
 
-  public static Logger getLogger(String name) {
+  public static SparkLogger getLogger(String name) {
     org.slf4j.Logger slf4jLogger = org.slf4j.LoggerFactory.getLogger(name);
-    return new Logger(slf4jLogger);
+    return new SparkLogger(slf4jLogger);
   }
 
-  public static Logger getLogger(Class<?> clazz) {
+  public static SparkLogger getLogger(Class<?> clazz) {
     org.slf4j.Logger slf4jLogger = org.slf4j.LoggerFactory.getLogger(clazz);
-    return new Logger(slf4jLogger);
+    return new SparkLogger(slf4jLogger);
   }
 }
diff --git 
a/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java 
b/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java
index d2120a997ba3..65eef3833646 100644
--- a/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java
+++ b/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java
@@ -30,8 +30,8 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.lang3.SystemUtils;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -40,7 +40,7 @@ import org.apache.spark.internal.MDC;
  * own Utils, just accessible within this package.
  */
 public class JavaUtils {
-  private static final Logger logger = 
LoggerFactory.getLogger(JavaUtils.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(JavaUtils.class);
 
   /**
    * Define a default value for driver memory here since this value is 
referenced across the code
diff --git 
a/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java 
b/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java
index 6c39304bece0..ecc0a75070c7 100644
--- a/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java
+++ b/common/utils/src/test/java/org/apache/spark/util/LoggerSuiteBase.java
@@ -26,13 +26,13 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.logging.log4j.Level;
 import org.junit.jupiter.api.Test;
 
-import org.apache.spark.internal.Logger;
+import org.apache.spark.internal.SparkLogger;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
 public abstract class LoggerSuiteBase {
 
-  abstract Logger logger();
+  abstract SparkLogger logger();
   abstract String className();
   abstract String logFilePath();
 
diff --git 
a/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java 
b/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java
index 13b6a1d05470..33de91697efa 100644
--- a/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java
+++ b/common/utils/src/test/java/org/apache/spark/util/PatternLoggerSuite.java
@@ -19,12 +19,12 @@ package org.apache.spark.util;
 
 import org.apache.logging.log4j.Level;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 public class PatternLoggerSuite extends LoggerSuiteBase {
 
-  private static final Logger LOGGER = 
LoggerFactory.getLogger(PatternLoggerSuite.class);
+  private static final SparkLogger LOGGER = 
SparkLoggerFactory.getLogger(PatternLoggerSuite.class);
 
   private String toRegexPattern(Level level, String msg) {
     return msg
@@ -33,7 +33,7 @@ public class PatternLoggerSuite extends LoggerSuiteBase {
   }
 
   @Override
-  Logger logger() {
+  SparkLogger logger() {
     return LOGGER;
   }
 
diff --git 
a/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java 
b/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java
index c1b31bf68a7d..110e7cc7794e 100644
--- 
a/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java
+++ 
b/common/utils/src/test/java/org/apache/spark/util/StructuredLoggerSuite.java
@@ -21,12 +21,13 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.logging.log4j.Level;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 public class StructuredLoggerSuite extends LoggerSuiteBase {
 
-  private static final Logger LOGGER = 
LoggerFactory.getLogger(StructuredLoggerSuite.class);
+  private static final SparkLogger LOGGER =
+    SparkLoggerFactory.getLogger(StructuredLoggerSuite.class);
 
   private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
   private String compactAndToRegexPattern(Level level, String json) {
@@ -43,7 +44,7 @@ public class StructuredLoggerSuite extends LoggerSuiteBase {
   }
 
   @Override
-  Logger logger() {
+  SparkLogger logger() {
     return LOGGER;
   }
 
diff --git 
a/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java
 
b/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java
index 37d133bcafd4..48c61e80d665 100644
--- 
a/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java
+++ 
b/connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java
@@ -19,8 +19,8 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -203,7 +203,7 @@ public class GangliaReporter extends ScheduledReporter {
         }
     }
 
-    private static final Logger LOGGER = 
LoggerFactory.getLogger(GangliaReporter.class);
+    private static final SparkLogger LOGGER = 
SparkLoggerFactory.getLogger(GangliaReporter.class);
 
     private final GMetric gmetric;
     private final GMetric[] gmetrics;
diff --git a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java 
b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
index 4aab6d9edcaa..5e9f1b78273a 100644
--- a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
+++ b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
@@ -31,8 +31,8 @@ import javax.annotation.concurrent.GuardedBy;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Throwables;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.util.ThreadUtils;
@@ -48,7 +48,8 @@ import org.apache.spark.util.ThreadUtils;
  */
 public class ReadAheadInputStream extends InputStream {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(ReadAheadInputStream.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ReadAheadInputStream.class);
 
   private ReentrantLock stateChangeLock = new ReentrantLock();
 
diff --git a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java 
b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
index aeabd358144f..7e993c8a2a3a 100644
--- a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
+++ b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
@@ -30,8 +30,8 @@ import java.util.TreeMap;
 
 import com.google.common.annotations.VisibleForTesting;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.unsafe.memory.MemoryBlock;
@@ -60,7 +60,7 @@ import org.apache.spark.util.Utils;
  */
 public class TaskMemoryManager {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(TaskMemoryManager.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(TaskMemoryManager.class);
 
   /** The number of bits used to address the page table. */
   private static final int PAGE_NUMBER_BITS = 13;
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
 
b/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
index 284d1dd036b4..86f7d5143eff 100644
--- 
a/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
+++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
@@ -34,8 +34,8 @@ import scala.collection.Iterator;
 
 import com.google.common.io.Closeables;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.Partitioner;
@@ -83,7 +83,8 @@ final class BypassMergeSortShuffleWriter<K, V>
   extends ShuffleWriter<K, V>
   implements ShuffleChecksumSupport {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(BypassMergeSortShuffleWriter.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(BypassMergeSortShuffleWriter.class);
 
   private final int fileBufferSize;
   private final boolean transferToEnabled;
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
index 8fe432cfe239..f96513f1b109 100644
--- 
a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
+++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
@@ -32,8 +32,8 @@ import org.apache.spark.SparkException;
 import org.apache.spark.TaskContext;
 import org.apache.spark.executor.ShuffleWriteMetrics;
 import org.apache.spark.internal.config.package$;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.memory.MemoryConsumer;
@@ -72,7 +72,8 @@ import org.apache.spark.util.Utils;
  */
 final class ShuffleExternalSorter extends MemoryConsumer implements 
ShuffleChecksumSupport {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(ShuffleExternalSorter.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(ShuffleExternalSorter.class);
 
   @VisibleForTesting
   static final int DISK_WRITE_BUFFER_SIZE = 1024 * 1024;
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java 
b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
index 6da9d3def3f8..13fd18c0942b 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
@@ -39,8 +39,8 @@ import com.google.common.io.Closeables;
 import org.apache.spark.*;
 import org.apache.spark.annotation.Private;
 import org.apache.spark.internal.config.package$;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.io.CompressionCodec;
@@ -68,7 +68,7 @@ import org.apache.spark.util.Utils;
 @Private
 public class UnsafeShuffleWriter<K, V> extends ShuffleWriter<K, V> {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(UnsafeShuffleWriter.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(UnsafeShuffleWriter.class);
 
   private static final ClassTag<Object> OBJECT_CLASS_TAG = 
ClassTag$.MODULE$.Object();
 
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java
 
b/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java
index fbf4abc160b6..606bb625f5b2 100644
--- 
a/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java
+++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskShuffleMapOutputWriter.java
@@ -27,8 +27,8 @@ import java.nio.channels.WritableByteChannel;
 import java.util.Optional;
 
 import org.apache.spark.SparkConf;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.shuffle.api.ShuffleMapOutputWriter;
@@ -45,8 +45,8 @@ import 
org.apache.spark.shuffle.api.metadata.MapOutputCommitMessage;
  */
 public class LocalDiskShuffleMapOutputWriter implements ShuffleMapOutputWriter 
{
 
-  private static final Logger log =
-    LoggerFactory.getLogger(LocalDiskShuffleMapOutputWriter.class);
+  private static final SparkLogger log =
+    SparkLoggerFactory.getLogger(LocalDiskShuffleMapOutputWriter.class);
 
   private final int shuffleId;
   private final long mapId;
diff --git 
a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java 
b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
index 3506e2a88864..2a8e15cd09cc 100644
--- a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
+++ b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
@@ -29,8 +29,8 @@ import com.google.common.io.Closeables;
 import org.apache.spark.SparkEnv;
 import org.apache.spark.executor.ShuffleWriteMetrics;
 import org.apache.spark.internal.LogKeys;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.memory.MemoryConsumer;
 import org.apache.spark.memory.SparkOutOfMemoryError;
@@ -68,7 +68,7 @@ import 
org.apache.spark.util.collection.unsafe.sort.UnsafeSorterSpillWriter;
  */
 public final class BytesToBytesMap extends MemoryConsumer {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(BytesToBytesMap.class);
+  private static final SparkLogger logger = 
SparkLoggerFactory.getLogger(BytesToBytesMap.class);
 
   private static final HashMapGrowthStrategy growthStrategy = 
HashMapGrowthStrategy.DOUBLING;
 
diff --git 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index 0be312d48a9d..af421e903ba3 100644
--- 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -32,8 +32,8 @@ import org.apache.commons.io.IOUtils;
 import org.apache.spark.TaskContext;
 import org.apache.spark.executor.ShuffleWriteMetrics;
 import org.apache.spark.internal.LogKeys;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.memory.MemoryConsumer;
 import org.apache.spark.memory.SparkOutOfMemoryError;
@@ -52,7 +52,8 @@ import org.apache.spark.util.Utils;
  */
 public final class UnsafeExternalSorter extends MemoryConsumer {
 
-  private static final Logger logger = 
LoggerFactory.getLogger(UnsafeExternalSorter.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(UnsafeExternalSorter.class);
 
   @Nullable
   private final PrefixComparator prefixComparator;
diff --git 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
index 4eff6a70acca..0693f8cb1a80 100644
--- 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
+++ 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
@@ -23,8 +23,8 @@ import org.apache.spark.SparkEnv;
 import org.apache.spark.TaskContext;
 import org.apache.spark.internal.config.package$;
 import org.apache.spark.internal.config.ConfigEntry;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.io.NioBufferedFileInputStream;
 import org.apache.spark.io.ReadAheadInputStream;
 import org.apache.spark.serializer.SerializerManager;
@@ -38,7 +38,8 @@ import java.io.*;
  * of the file format).
  */
 public final class UnsafeSorterSpillReader extends UnsafeSorterIterator 
implements Closeable {
-  private static final Logger logger = 
LoggerFactory.getLogger(UnsafeSorterSpillReader.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(UnsafeSorterSpillReader.class);
   public static final int MAX_BUFFER_SIZE_BYTES = 16777216; // 16 mb
 
   private InputStream in;
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
index be7e682a3bdf..c057c36ca820 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
@@ -19,8 +19,8 @@ package org.apache.spark.sql.catalyst.expressions;
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.memory.MemoryConsumer;
@@ -48,7 +48,8 @@ import org.apache.spark.unsafe.memory.MemoryBlock;
  *
  */
 public abstract class RowBasedKeyValueBatch extends MemoryConsumer implements 
Closeable {
-  protected static final Logger logger = 
LoggerFactory.getLogger(RowBasedKeyValueBatch.class);
+  protected static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(RowBasedKeyValueBatch.class);
 
   private static final int DEFAULT_CAPACITY = 1 << 16;
 
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java
index d66524d841ca..ec461f974001 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/util/CaseInsensitiveStringMap.java
@@ -26,8 +26,8 @@ import java.util.Objects;
 import java.util.Set;
 
 import org.apache.spark.annotation.Experimental;
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.SparkIllegalArgumentException;
@@ -45,7 +45,8 @@ import org.apache.spark.SparkUnsupportedOperationException;
  */
 @Experimental
 public class CaseInsensitiveStringMap implements Map<String, String> {
-  private static final Logger logger = 
LoggerFactory.getLogger(CaseInsensitiveStringMap.class);
+  private static final SparkLogger logger =
+    SparkLoggerFactory.getLogger(CaseInsensitiveStringMap.class);
 
   public static CaseInsensitiveStringMap empty() {
     return new CaseInsensitiveStringMap(new HashMap<>(0));
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java
index 009b9f253ce0..b31d024eeeeb 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java
@@ -22,8 +22,8 @@ import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -33,7 +33,7 @@ import org.apache.spark.internal.MDC;
  */
 public abstract class AbstractService implements Service {
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(AbstractService.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(AbstractService.class);
 
   /**
    * Service state: initially {@link STATE#NOTINITED}.
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java
index ecd9de8154b3..663bcdb86f9f 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java
@@ -24,8 +24,8 @@ import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -35,7 +35,7 @@ import org.apache.spark.internal.MDC;
  */
 public class CompositeService extends AbstractService {
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(CompositeService.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(CompositeService.class);
 
   private final List<Service> serviceList = new ArrayList<Service>();
 
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java
index 25e0316d5e9c..c315478939c8 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java
@@ -22,8 +22,8 @@ import java.security.NoSuchAlgorithmException;
 
 import org.apache.commons.codec.binary.Base64;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 /**
  * The cookie signer generates a signature based on SHA digest
@@ -34,7 +34,7 @@ public class CookieSigner {
   private static final String SIGNATURE = "&s=";
   private static final String SHA_STRING = "SHA-256";
   private byte[] secretBytes;
-  private static final Logger LOG = 
LoggerFactory.getLogger(CookieSigner.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(CookieSigner.class);
 
   /**
    * Constructor
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java
index d947f01681be..92d733c563ca 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java
@@ -19,8 +19,8 @@ package org.apache.hive.service;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -29,7 +29,7 @@ import org.apache.spark.internal.MDC;
  *
  */
 public final class ServiceOperations {
-  private static final Logger LOG = 
LoggerFactory.getLogger(ServiceOperations.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(ServiceOperations.class);
 
   private ServiceOperations() {
   }
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java
index 82ef4b9f9ce7..25db121207bb 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java
@@ -18,7 +18,7 @@ package org.apache.hive.service;
 
 import java.io.IOException;
 
-import org.apache.spark.internal.Logger;
+import org.apache.spark.internal.SparkLogger;
 
 public class ServiceUtils {
 
@@ -52,7 +52,7 @@ public class ServiceUtils {
    * @param log the log to record problems to at debug level. Can be null.
    * @param closeables the objects to close
    */
-  public static void cleanup(Logger log, java.io.Closeable... closeables) {
+  public static void cleanup(SparkLogger log, java.io.Closeable... closeables) 
{
     for (java.io.Closeable c : closeables) {
       if (c != null) {
         try {
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
index b570e88e2bc5..ecbda2661e96 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -43,8 +43,8 @@ import org.apache.thrift.TProcessorFactory;
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -54,7 +54,7 @@ import org.apache.spark.internal.MDC;
  */
 public class HiveAuthFactory {
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(HiveAuthFactory.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(HiveAuthFactory.class);
 
   public enum AuthTypes {
     NOSASL("NOSASL"),
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java
index 0bfe361104de..e307bdab0449 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java
@@ -40,8 +40,8 @@ import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -53,7 +53,7 @@ public final class HttpAuthUtils {
   public static final String AUTHORIZATION = "Authorization";
   public static final String BASIC = "Basic";
   public static final String NEGOTIATE = "Negotiate";
-  private static final Logger LOG = 
LoggerFactory.getLogger(HttpAuthUtils.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(HttpAuthUtils.class);
   private static final String COOKIE_ATTR_SEPARATOR = "&";
   private static final String COOKIE_CLIENT_USER_NAME = "cu";
   private static final String COOKIE_CLIENT_RAND_NUMBER = "rn";
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
index 3b24ad1ebe14..e0091d6c04fe 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
@@ -26,8 +26,8 @@ import org.apache.thrift.transport.TSaslServerTransport;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 /**
  * This class is responsible for setting the ipAddress for operations executed 
via HiveServer2.
@@ -39,7 +39,7 @@ import org.apache.spark.internal.LoggerFactory;
  */
 public class TSetIpAddressProcessor<I extends Iface> extends 
TCLIService.Processor<Iface> {
 
-  private static final Logger LOGGER = 
LoggerFactory.getLogger(TSetIpAddressProcessor.class);
+  private static final SparkLogger LOGGER = 
SparkLoggerFactory.getLogger(TSetIpAddressProcessor.class);
 
   public TSetIpAddressProcessor(Iface iface) {
     super(iface);
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java
index e612b34d7bdf..86fb725d3a3c 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIService.java
@@ -50,8 +50,8 @@ import org.apache.hive.service.rpc.thrift.TRowSet;
 import org.apache.hive.service.rpc.thrift.TTableSchema;
 import org.apache.hive.service.server.HiveServer2;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -61,7 +61,7 @@ import org.apache.spark.internal.MDC;
  */
 public class CLIService extends CompositeService implements ICLIService {
 
-  private static final Logger LOG = LoggerFactory.getLogger(CLIService.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(CLIService.class);
 
   public static final TProtocolVersion SERVER_VERSION;
 
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java
index f6a269e99251..4331f6829fbf 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/ColumnBasedSet.java
@@ -31,8 +31,8 @@ import org.apache.thrift.protocol.TCompactProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TIOStreamTransport;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -47,7 +47,7 @@ public class ColumnBasedSet implements RowSet {
   private final List<ColumnBuffer> columns;
   private byte[] blob;
   private boolean isBlobBased = false;
-  public static final Logger LOG = 
LoggerFactory.getLogger(ColumnBasedSet.class);
+  public static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(ColumnBasedSet.class);
 
   public ColumnBasedSet(TableSchema schema) {
     descriptors = schema.toTypeDescriptors();
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java
index 387663221171..0b71b606b9d6 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java
@@ -29,8 +29,8 @@ import com.google.common.collect.Iterables;
 import com.google.common.collect.Multimap;
 import org.apache.hadoop.hive.metastore.TableType;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -43,7 +43,7 @@ import org.apache.spark.internal.MDC;
  */
 public class ClassicTableTypeMapping implements TableTypeMapping {
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(ClassicTableTypeMapping.class);
+  private static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(ClassicTableTypeMapping.class);
 
   public enum ClassicTableTypes {
     TABLE,
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java
index 135420508e21..f0c1985ce58a 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/Operation.java
@@ -39,8 +39,8 @@ import org.apache.hive.service.rpc.thrift.TProtocolVersion;
 import org.apache.hive.service.rpc.thrift.TRowSet;
 import org.apache.hive.service.rpc.thrift.TTableSchema;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -49,7 +49,7 @@ public abstract class Operation {
   private OperationState state = OperationState.INITIALIZED;
   private final OperationHandle opHandle;
   private HiveConf configuration;
-  public static final Logger LOG = LoggerFactory.getLogger(Operation.class);
+  public static final SparkLogger LOG = 
SparkLoggerFactory.getLogger(Operation.class);
   public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = 
FetchOrientation.FETCH_NEXT;
   public static final long DEFAULT_FETCH_MAX_ROWS = 100;
   protected boolean hasResultSet;
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java
index 1498cb4907f1..fd8266d1a9ac 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -41,8 +41,8 @@ import org.apache.hive.service.rpc.thrift.TRowSet;
 import org.apache.hive.service.rpc.thrift.TTableSchema;
 import org.apache.logging.log4j.core.Appender;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -51,7 +51,7 @@ import org.apache.spark.internal.MDC;
  *
  */
 public class OperationManager extends AbstractService {
-  private static final Logger LOG = LoggerFactory.getLogger(OperationManager.class);
+  private static final SparkLogger LOG = SparkLoggerFactory.getLogger(OperationManager.class);
 
   private final Map<OperationHandle, Operation> handleToOperation =
       new HashMap<OperationHandle, Operation>();
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index e073fa4713bf..410d010a79bd 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -70,8 +70,8 @@ import org.apache.hive.service.rpc.thrift.TRowSet;
 import org.apache.hive.service.rpc.thrift.TTableSchema;
 import org.apache.hive.service.server.ThreadWithGarbageCleanup;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -94,7 +94,7 @@ public class HiveSessionImpl implements HiveSession {
   private String ipAddress;
   private static final String FETCH_WORK_SERDE_CLASS =
       "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";
-  private static final Logger LOG = LoggerFactory.getLogger(HiveSessionImpl.class);
+  private static final SparkLogger LOG = SparkLoggerFactory.getLogger(HiveSessionImpl.class);
   private SessionManager sessionManager;
   private OperationManager operationManager;
   private final Set<OperationHandle> opHandleSet = new 
HashSet<OperationHandle>();
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java
index 6c282b679ca8..3f60fd00b82a 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/SessionManager.java
@@ -39,8 +39,8 @@ import org.apache.hive.service.rpc.thrift.TProtocolVersion;
 import org.apache.hive.service.server.HiveServer2;
 import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -50,7 +50,7 @@ import org.apache.spark.internal.MDC;
  */
 public class SessionManager extends CompositeService {
 
-  private static final Logger LOG = LoggerFactory.getLogger(SessionManager.class);
+  private static final SparkLogger LOG = SparkLoggerFactory.getLogger(SessionManager.class);
   public static final String HIVERCFILE = ".hiverc";
   private HiveConf hiveConf;
   private final Map<SessionHandle, HiveSession> handleToSession =
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index defe51bc9799..07af0013846b 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -42,8 +42,8 @@ import org.apache.thrift.server.ServerContext;
 import org.apache.thrift.server.TServerEventHandler;
 import org.apache.thrift.transport.TTransport;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -53,7 +53,7 @@ import org.apache.spark.internal.MDC;
  */
 public abstract class ThriftCLIService extends AbstractService implements 
TCLIService.Iface, Runnable {
 
-  public static final Logger LOG = LoggerFactory.getLogger(ThriftCLIService.class);
+  public static final SparkLogger LOG = SparkLoggerFactory.getLogger(ThriftCLIService.class);
 
   protected CLIService cliService;
   private static final TStatus OK_STATUS = new 
TStatus(TStatusCode.SUCCESS_STATUS);
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index b423038fe2b6..d9bf361fdef6 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -56,8 +56,8 @@ import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 
@@ -69,7 +69,7 @@ import org.apache.spark.internal.MDC;
 public class ThriftHttpServlet extends TServlet {
 
   private static final long serialVersionUID = 1L;
-  public static final Logger LOG = LoggerFactory.getLogger(ThriftHttpServlet.class);
+  public static final SparkLogger LOG = SparkLoggerFactory.getLogger(ThriftHttpServlet.class);
   private final String authType;
   private final UserGroupInformation serviceUGI;
   private final UserGroupInformation httpUGI;
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java
index b6c9b937c5f3..9345125a8279 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/HiveServer2.java
@@ -37,8 +37,8 @@ import 
org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
 import org.apache.spark.util.ShutdownHookManager;
@@ -49,7 +49,7 @@ import org.apache.spark.util.SparkExitCode;
  *
  */
 public class HiveServer2 extends CompositeService {
-  private static final Logger LOG = LoggerFactory.getLogger(HiveServer2.class);
+  private static final SparkLogger LOG = SparkLoggerFactory.getLogger(HiveServer2.class);
 
   private CLIService cliService;
   private ThriftCLIService thriftCLIService;
diff --git 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java
 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java
index 23957e146ddf..16d8540b4056 100644
--- 
a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java
+++ 
b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java
@@ -23,8 +23,8 @@ import java.util.Map;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.RawStore;
 
-import org.apache.spark.internal.Logger;
-import org.apache.spark.internal.LoggerFactory;
+import org.apache.spark.internal.SparkLogger;
+import org.apache.spark.internal.SparkLoggerFactory;
 
 /**
  * A HiveServer2 thread used to construct new server threads.
@@ -32,7 +32,7 @@ import org.apache.spark.internal.LoggerFactory;
  * when killed by its corresponding ExecutorService.
  */
 public class ThreadWithGarbageCleanup extends Thread {
-  private static final Logger LOG = LoggerFactory.getLogger(ThreadWithGarbageCleanup.class);
+  private static final SparkLogger LOG = SparkLoggerFactory.getLogger(ThreadWithGarbageCleanup.class);
 
   Map<Long, RawStore> threadRawStoreMap =
       ThreadFactoryWithGarbageCleanup.getThreadRawStoreMap();
diff --git 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
index bf1c4978431b..46537f75f1a1 100644
--- 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
+++ 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
@@ -34,7 +34,7 @@ import org.apache.hive.service.auth.HiveAuthFactory
 import org.apache.hive.service.cli._
 import org.apache.hive.service.server.HiveServer2
 
-import org.apache.spark.internal.Logger
+import org.apache.spark.internal.SparkLogger
 import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql.catalyst.util.SQLKeywordUtils
 import org.apache.spark.sql.errors.QueryExecutionErrors
@@ -113,10 +113,10 @@ private[hive] class SparkSQLCLIService(hiveServer: 
HiveServer2, sqlContext: SQLC
 
 private[thriftserver] trait ReflectedCompositeService { this: AbstractService 
=>
 
-  private val logInfo = (msg: String) => getAncestorField[Logger](this, 3, "LOG").info(msg)
+  private val logInfo = (msg: String) => getAncestorField[SparkLogger](this, 3, "LOG").info(msg)
 
   private val logError = (msg: String, e: Throwable) =>
-    getAncestorField[Logger](this, 3, "LOG").error(msg, e)
+    getAncestorField[SparkLogger](this, 3, "LOG").error(msg, e)
 
   def initCompositeService(hiveConf: HiveConf): Unit = {
     // Emulating `CompositeService.init(hiveConf)`


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org

Reply via email to