http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/itests/pom.xml
----------------------------------------------------------------------
diff --git a/itests/pom.xml b/itests/pom.xml
index f156cc4..acce713 100644
--- a/itests/pom.xml
+++ b/itests/pom.xml
@@ -94,7 +94,7 @@
                       }
                       mkdir -p $DOWNLOAD_DIR
                      download "http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-${spark.version}-bin-hadoop2-without-hive.tgz" "spark"
-                      cp -f $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/spark/conf/
+                      cp -f $HIVE_ROOT/data/conf/spark/log4j2.xml $BASE_DIR/spark/conf/
                      sed '/package /d' ${basedir}/${hive.path.to.root}/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java > /tmp/UDFExampleAdd.java
                      javac -cp ${settings.localRepository}/org/apache/hive/hive-exec/${project.version}/hive-exec-${project.version}.jar /tmp/UDFExampleAdd.java -d /tmp
                      jar -cf /tmp/udfexampleadd-1.0.jar -C /tmp UDFExampleAdd.class

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index b33cb58..65117c4 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -5931,7 +5931,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
 
     // If the log4j.configuration property hasn't already been explicitly set,
     // use Hive's default log4j configuration
-    if (System.getProperty("log4j.configuration") == null) {
+    if (System.getProperty("log4j.configurationFile") == null) {
       // NOTE: It is critical to do this here so that log4j is reinitialized
       // before any of the other core hive classes are loaded
       try {

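Note: Log4j2 reads its configuration location from the log4j.configurationFile system property rather than log4j 1.x's log4j.configuration, which is all this hunk changes; the guard itself is unchanged. A minimal standalone sketch of the pattern (class and file names here are illustrative, not Hive's actual initialization code):

  import org.apache.logging.log4j.core.config.Configurator;

  public class LogInitSketch {
    public static void initDefaultLogging() {
      // Only install a default config when the user has not already pointed
      // the JVM at one via -Dlog4j.configurationFile=...
      if (System.getProperty("log4j.configurationFile") == null) {
        // Configurator.initialize(name, configLocation) loads the named config file
        Configurator.initialize("HiveLog4j2", "hive-log4j2.xml");
      }
    }
  }
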
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
index ad99427..df42f1a 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
@@ -17,15 +17,11 @@
  */
 package org.apache.hadoop.hive.metastore.txn;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+import static junit.framework.Assert.fail;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -34,7 +30,29 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import static junit.framework.Assert.*;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
+import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
+import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionType;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.LockComponent;
+import org.apache.hadoop.hive.metastore.api.LockLevel;
+import org.apache.hadoop.hive.metastore.api.LockRequest;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.LockState;
+import org.apache.hadoop.hive.metastore.api.LockType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
+import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement;
+import org.apache.hadoop.hive.metastore.api.UnlockRequest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * Tests for TxnHandler.
@@ -43,11 +61,9 @@ public class TestCompactionTxnHandler {
 
   private HiveConf conf = new HiveConf();
   private CompactionTxnHandler txnHandler;
-  static final private Log LOG = LogFactory.getLog(TestCompactionTxnHandler.class);
 
   public TestCompactionTxnHandler() throws Exception {
     TxnDbUtil.setConfValues(conf);
-    LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG);
     tearDown();
   }
 

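The hunk above also drops the log4j 1.x call that forced TxnHandler's logger to DEBUG. If that behavior were still wanted, a Log4j2 equivalent would look roughly like the sketch below (the same LoggerContext/LoggerConfig pattern the TestTxnHandler change further down uses); note that getLoggerConfig returns the closest ancestor LoggerConfig when none exists for the exact name, so the level change may apply to a wider logger subtree:

  import org.apache.logging.log4j.Level;
  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.core.LoggerContext;
  import org.apache.logging.log4j.core.config.Configuration;

  public class DebugLevelSketch {
    public static void enableDebug(String loggerName) {
      LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
      Configuration cfg = ctx.getConfiguration();
      // Raise the level on the logger's configuration, then push the change out
      cfg.getLoggerConfig(loggerName).setLevel(Level.DEBUG);
      ctx.updateLoggers(cfg);
    }
  }
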
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
index f478184..6dc0bd3 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
@@ -17,16 +17,11 @@
  */
 package org.apache.hadoop.hive.metastore.txn;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertFalse;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+import static junit.framework.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.SQLException;
@@ -36,21 +31,66 @@ import java.util.List;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import static junit.framework.Assert.*;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
+import org.apache.hadoop.hive.metastore.api.CheckLockRequest;
+import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionType;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.HeartbeatRequest;
+import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeRequest;
+import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
+import org.apache.hadoop.hive.metastore.api.LockComponent;
+import org.apache.hadoop.hive.metastore.api.LockLevel;
+import org.apache.hadoop.hive.metastore.api.LockRequest;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.LockState;
+import org.apache.hadoop.hive.metastore.api.LockType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
+import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
+import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
+import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement;
+import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
+import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
+import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement;
+import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
+import org.apache.hadoop.hive.metastore.api.TxnInfo;
+import org.apache.hadoop.hive.metastore.api.TxnOpenException;
+import org.apache.hadoop.hive.metastore.api.TxnState;
+import org.apache.hadoop.hive.metastore.api.UnlockRequest;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Tests for TxnHandler.
  */
 public class TestTxnHandler {
   static final private String CLASS_NAME = TxnHandler.class.getName();
-  static final private Log LOG = LogFactory.getLog(CLASS_NAME);
+  private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
 
   private HiveConf conf = new HiveConf();
   private TxnHandler txnHandler;
 
   public TestTxnHandler() throws Exception {
     TxnDbUtil.setConfValues(conf);
-    LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG);
+    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+    Configuration logConf = ctx.getConfiguration();
+    logConf.getLoggerConfig(CLASS_NAME).setLevel(Level.DEBUG);
+    ctx.updateLoggers(logConf);
     tearDown();
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/packaging/src/main/assembly/bin.xml
----------------------------------------------------------------------
diff --git a/packaging/src/main/assembly/bin.xml b/packaging/src/main/assembly/bin.xml
index 63253c5..0fa6af8 100644
--- a/packaging/src/main/assembly/bin.xml
+++ b/packaging/src/main/assembly/bin.xml
@@ -275,7 +275,7 @@
       <fileMode>644</fileMode>
       <includes>
         <include>webhcat-default.xml</include>
-        <include>webhcat-log4j.properties</include>
+        <include>webhcat-log4j2.xml</include>
       </includes>
       <filtered>true</filtered>
       <outputDirectory>hcatalog/etc/webhcat</outputDirectory>
@@ -323,19 +323,19 @@
 
   <files>
     <file>
-      <source>${project.parent.basedir}/common/src/main/resources/hive-log4j.properties</source>
+      <source>${project.parent.basedir}/common/src/main/resources/hive-log4j2.xml</source>
       <outputDirectory>conf</outputDirectory>
-      <destName>hive-log4j.properties.template</destName>
+      <destName>hive-log4j2.xml.template</destName>
     </file>
     <file>
-      <source>${project.parent.basedir}/ql/src/main/resources/hive-exec-log4j.properties</source>
+      <source>${project.parent.basedir}/ql/src/main/resources/hive-exec-log4j2.xml</source>
       <outputDirectory>conf</outputDirectory>
-      <destName>hive-exec-log4j.properties.template</destName>
+      <destName>hive-exec-log4j2.xml.template</destName>
     </file>
     <file>
-      <source>${project.parent.basedir}/beeline/src/main/resources/beeline-log4j.properties</source>
+      <source>${project.parent.basedir}/beeline/src/main/resources/beeline-log4j2.xml</source>
       <outputDirectory>conf</outputDirectory>
-      <destName>beeline-log4j.properties.template</destName>
+      <destName>beeline-log4j2.xml.template</destName>
     </file>
     <file>
       <source>${project.parent.basedir}/hcatalog/README.txt</source>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 0383e01..15c2805 100644
--- a/pom.xml
+++ b/pom.xml
@@ -148,8 +148,7 @@
     <kryo.version>2.22</kryo.version>
     <libfb303.version>0.9.2</libfb303.version>
     <libthrift.version>0.9.2</libthrift.version>
-    <log4j.version>1.2.16</log4j.version>
-    <log4j-extras.version>1.2.17</log4j-extras.version>
+    <log4j2.version>2.3</log4j2.version>
     <opencsv.version>2.3</opencsv.version>
     <mockito-all.version>1.9.5</mockito-all.version>
     <mina.version>2.0.0-M5</mina.version>
@@ -366,14 +365,24 @@
         <version>${junit.version}</version>
       </dependency>
       <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <version>${log4j.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>${log4j2.version}</version>
       </dependency>
       <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>apache-log4j-extras</artifactId>
-        <version>${log4j-extras.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-web</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-slf4j-impl</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-jcl</artifactId>
+        <version>${log4j2.version}</version>
       </dependency>
       <dependency>
         <groupId>org.antlr</groupId>
@@ -584,11 +593,6 @@
         <version>${slf4j.version}</version>
       </dependency>
       <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-log4j12</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
-      <dependency>
         <groupId>xerces</groupId>
         <artifactId>xercesImpl</artifactId>
         <version>${xerces.version}</version>
@@ -604,11 +608,6 @@
       <artifactId>slf4j-api</artifactId>
       <version>${slf4j.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <version>${slf4j.version}</version>
-    </dependency>
   </dependencies>
 
   <build>
@@ -872,7 +871,7 @@
             <!-- required for hive-exec jar path and tests which reference a jar -->
             <maven.local.repository>${maven.repo.local}</maven.local.repository>
             <mapred.job.tracker>local</mapred.job.tracker>
-            <log4j.configuration>${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j.properties</log4j.configuration>
+            <log4j.configurationFile>${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j2.xml</log4j.configurationFile>
             <log4j.debug>true</log4j.debug>
            <!-- don't dirty up /tmp -->
             <java.io.tmpdir>${test.tmp.dir}</java.io.tmpdir>

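The dependency changes above swap log4j 1.2 (and the slf4j-log4j12 binding) for the Log4j2 artifacts: log4j-1.2-api emulates the old log4j 1.x API, log4j-slf4j-impl binds SLF4J to Log4j2, and log4j-jcl does the same for commons-logging. A small sketch of what the bridges buy, with both logging facades ending up in the same Log4j2 core (class name illustrative):

  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class BridgeSketch {
    public static void main(String[] args) {
      Log jcl = LogFactory.getLog(BridgeSketch.class);            // routed by log4j-jcl
      Logger slf4j = LoggerFactory.getLogger(BridgeSketch.class); // routed by log4j-slf4j-impl
      jcl.info("via commons-logging");  // both messages land in the appenders
      slf4j.info("via slf4j");          // configured in log4j2.xml
    }
  }
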
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index e7a8e7b..36b3433 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -112,14 +112,19 @@
       <version>${javolution.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>${log4j.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>apache-log4j-extras</artifactId>
-      <version>${log4j-extras.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-jcl</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
       <groupId>org.antlr</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index a2cf712..82345ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -27,7 +27,6 @@ import java.lang.management.MemoryMXBean;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Enumeration;
 import java.util.List;
 import java.util.Properties;
 
@@ -57,13 +56,14 @@ import org.apache.hadoop.hive.ql.exec.PartitionKeySampler;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.tez.TezSessionState;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
+import org.apache.hadoop.hive.ql.exec.tez.TezSessionState;
 import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
 import org.apache.hadoop.hive.ql.io.IOPrepareCache;
+import org.apache.hadoop.hive.ql.log.NullAppender;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -88,11 +88,12 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Partitioner;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Appender;
-import org.apache.log4j.BasicConfigurator;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.varia.NullAppender;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.appender.FileAppender;
+import org.apache.logging.log4j.core.appender.RollingFileAppender;
 
 /**
  * ExecDriver is the central class in co-ordinating execution of any map-reduce task.
@@ -687,8 +688,10 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     if (noLog) {
       // If started from main(), and noLog is on, we should not output
       // any logs. To turn the log on, please set -Dtest.silent=false
-      BasicConfigurator.resetConfiguration();
-      BasicConfigurator.configure(new NullAppender());
+      Logger logger = LogManager.getRootLogger();
+      NullAppender appender = NullAppender.createNullAppender();
+      appender.addToLogger(logger.getName(), Level.ERROR);
+      appender.start();
     } else {
       setupChildLog4j(conf);
     }
@@ -703,10 +706,12 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
 
     // print out the location of the log file for the user so
     // that it's easy to find reason for local mode execution failures
-    for (Appender appender : Collections.list((Enumeration<Appender>) LogManager.getRootLogger()
-        .getAllAppenders())) {
+    for (Appender appender : ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger())
+            .getAppenders().values()) {
       if (appender instanceof FileAppender) {
-        console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
+        console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
+      } else if (appender instanceof RollingFileAppender) {
+        console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
       }
     }
 

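The loop rewrite above is the recurring idiom in this commit: log4j 1.x exposed appenders as an Enumeration on the Logger, whereas Log4j2's core Logger exposes them as a Map, and the file path accessor is getFileName() rather than getFile(). The same idiom in isolation (a sketch, not Hive code):

  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.core.Appender;
  import org.apache.logging.log4j.core.appender.FileAppender;
  import org.apache.logging.log4j.core.appender.RollingFileAppender;

  public class AppenderScanSketch {
    public static void printLogLocations() {
      // The cast to the core Logger is required; the public API Logger has no getAppenders()
      org.apache.logging.log4j.core.Logger root =
          (org.apache.logging.log4j.core.Logger) LogManager.getRootLogger();
      for (Appender appender : root.getAppenders().values()) {
        if (appender instanceof FileAppender) {
          System.out.println(((FileAppender) appender).getFileName());
        } else if (appender instanceof RollingFileAppender) {
          System.out.println(((RollingFileAppender) appender).getFileName());
        }
      }
    }
  }
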
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index 6a6593c..44dfe3e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -24,7 +24,6 @@ import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Collections;
-import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -33,8 +32,6 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.MapRedStats;
@@ -59,9 +56,11 @@ import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapred.TaskReport;
-import org.apache.log4j.Appender;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.LogManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.Logger;
+import org.apache.logging.log4j.core.appender.FileAppender;
+import org.apache.logging.log4j.core.appender.RollingFileAppender;
 
 public class HadoopJobExecHelper {
 
@@ -492,10 +491,11 @@ public class HadoopJobExecHelper {
     sb.append("Logs:\n");
     console.printError(sb.toString());
 
-    for (Appender a : Collections.list((Enumeration<Appender>)
-          LogManager.getRootLogger().getAllAppenders())) {
-      if (a instanceof FileAppender) {
-        console.printError((new Path(((FileAppender)a).getFile())).toUri().getPath());
+    for (Appender appender : ((Logger) LogManager.getRootLogger()).getAppenders().values()) {
+      if (appender instanceof FileAppender) {
+        console.printError(((FileAppender) appender).getFileName());
+      } else if (appender instanceof RollingFileAppender) {
+        console.printError(((RollingFileAppender) appender).getFileName());
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 3cb9e9c..cee0878 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hive.ql.io.rcfile.stats;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Enumeration;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
@@ -59,9 +57,11 @@ import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
-import org.apache.log4j.Appender;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.LogManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.Logger;
+import org.apache.logging.log4j.core.appender.FileAppender;
+import org.apache.logging.log4j.core.appender.RollingFileAppender;
 
 /**
  * PartialScanTask.
@@ -335,15 +335,15 @@ public class PartialScanTask extends Task<PartialScanWork> implements
 
     // print out the location of the log file for the user so
     // that it's easy to find reason for local mode execution failures
-    for (Appender appender : Collections
-        .list((Enumeration<Appender>) LogManager.getRootLogger()
-            .getAllAppenders())) {
+    for (Appender appender : ((Logger) LogManager.getRootLogger()).getAppenders().values()) {
       if (appender instanceof FileAppender) {
-        console.printInfo("Execution log at: "
-            + ((FileAppender) appender).getFile());
+        console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
+      } else if (appender instanceof RollingFileAppender) {
+        console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
       }
     }
 
+
     PartialScanWork mergeWork = new PartialScanWork(inputPaths);
     DriverContext driverCxt = new DriverContext();
     PartialScanTask taskExec = new PartialScanTask();

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java
new file mode 100644
index 0000000..46662c4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import java.io.Serializable;
+import java.util.concurrent.atomic.AtomicLongArray;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginElement;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * A Log4j2 Appender that simply counts logging events in four levels:
+ * fatal, error, warn and info. The class name is used in log4j2.xml.
+ */
+@Plugin(name = "HiveEventCounter", category = "Core", elementType = "appender", printObject = true)
+public class HiveEventCounter extends AbstractAppender {
+  private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+  private static Configuration configuration = context.getConfiguration();
+  private static final String APPENDER_NAME = "HiveEventCounter";
+  private static final int FATAL = 0;
+  private static final int ERROR = 1;
+  private static final int WARN = 2;
+  private static final int INFO = 3;
+
+  private static class EventCounts {
+    private final AtomicLongArray counts = new AtomicLongArray(4);
+
+    private void incr(int i) {
+      counts.incrementAndGet(i);
+    }
+
+    private long get(int i) {
+      return counts.get(i);
+    }
+  }
+
+  private static EventCounts counts = new EventCounts();
+
+  protected HiveEventCounter(String name, Filter filter,
+      Layout<? extends Serializable> layout, boolean ignoreExceptions) {
+    super(name, filter, layout, ignoreExceptions);
+  }
+
+  @PluginFactory
+  public static HiveEventCounter createInstance(@PluginAttribute("name") String name,
+      @PluginAttribute("ignoreExceptions") boolean ignoreExceptions,
+      @PluginElement("Layout") Layout layout,
+      @PluginElement("Filters") Filter filter) {
+    if (name == null) {
+      name = APPENDER_NAME;
+    }
+
+    if (layout == null) {
+      layout = PatternLayout.createDefaultLayout();
+    }
+    return new HiveEventCounter(name, filter, layout, ignoreExceptions);
+  }
+
+  @InterfaceAudience.Private
+  public static long getFatal() {
+    return counts.get(FATAL);
+  }
+
+  @InterfaceAudience.Private
+  public static long getError() {
+    return counts.get(ERROR);
+  }
+
+  @InterfaceAudience.Private
+  public static long getWarn() {
+    return counts.get(WARN);
+  }
+
+  @InterfaceAudience.Private
+  public static long getInfo() {
+    return counts.get(INFO);
+  }
+
+  @VisibleForTesting
+  public void addToLogger(String loggerName, Level level) {
+    LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+    loggerConfig.addAppender(this, level, null);
+    context.updateLoggers();
+  }
+
+  @VisibleForTesting
+  public void removeFromLogger(String loggerName) {
+    LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+    loggerConfig.removeAppender(APPENDER_NAME);
+    context.updateLoggers();
+  }
+
+  public void append(LogEvent event) {
+    Level level = event.getLevel();
+    if (level.equals(Level.INFO)) {
+      counts.incr(INFO);
+    } else if (level.equals(Level.WARN)) {
+      counts.incr(WARN);
+    } else if (level.equals(Level.ERROR)) {
+      counts.incr(ERROR);
+    } else if (level.equals(Level.FATAL)) {
+      counts.incr(FATAL);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java
new file mode 100644
index 0000000..c4cb7dd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import java.io.Serializable;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+/**
+ * A NullAppender merely exists, it never outputs a message to any device.
+ */
+@Plugin(name = "NullAppender", category = "Core", elementType = "appender", printObject = false)
+public class NullAppender extends AbstractAppender {
+
+  private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+  private static Configuration configuration = context.getConfiguration();
+
+  protected NullAppender(String name, Filter filter,
+      Layout<? extends Serializable> layout, boolean ignoreExceptions) {
+    super(name, filter, layout, ignoreExceptions);
+  }
+
+  @PluginFactory
+  public static NullAppender createNullAppender() {
+    return new NullAppender("NullAppender", null, PatternLayout.createDefaultLayout(), true);
+  }
+
+  public void addToLogger(String loggerName, Level level) {
+    LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+    loggerConfig.addAppender(this, level, null);
+    context.updateLoggers();
+  }
+
+  public void append(LogEvent event) {
+    // no-op
+  }
+}

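Usage mirrors the ExecDriver change earlier in this commit: create the appender through its plugin factory, attach it to a logger by name, then start it. A short sketch:

  import org.apache.hadoop.hive.ql.log.NullAppender;
  import org.apache.logging.log4j.Level;
  import org.apache.logging.log4j.LogManager;

  public class SilenceSketch {
    public static void silenceRootLogger() {
      NullAppender appender = NullAppender.createNullAppender();
      // Send the root logger's output into the no-op appender
      appender.addToLogger(LogManager.getRootLogger().getName(), Level.ERROR);
      appender.start();
    }
  }
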
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java
deleted file mode 100644
index 6a59d4a..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.log;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.RuntimeMXBean;
-
-import org.apache.log4j.DailyRollingFileAppender;
-
-public class PidDailyRollingFileAppender extends DailyRollingFileAppender {
-
-  @Override
-  public void setFile(String file) {
-    RuntimeMXBean rt = ManagementFactory.getRuntimeMXBean();
-    super.setFile(file + '.' + rt.getName());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java
new file mode 100644
index 0000000..4db10bb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import java.lang.management.ManagementFactory;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.pattern.AbstractPatternConverter;
+import org.apache.logging.log4j.core.pattern.ArrayPatternConverter;
+import org.apache.logging.log4j.core.pattern.ConverterKeys;
+
+/**
+ * FilePattern converter that converts %pid pattern to <process-id>@<hostname> information
+ * obtained at runtime.
+ *
+ * Example usage:
+ * <RollingFile name="Rolling-default" fileName="test.log" filePattern="test.log.%pid.gz">
+ *
+ * Will generate output file with name containing <process-id>@<hostname> like below
+ * test.log.95...@localhost.gz
+ */
+@Plugin(name = "PidFilePatternConverter", category = "FileConverter")
+@ConverterKeys({ "pid" })
+public class PidFilePatternConverter extends AbstractPatternConverter implements
+    ArrayPatternConverter {
+
+  /**
+   * Private constructor.
+   */
+  private PidFilePatternConverter() {
+    super("pid", "pid");
+  }
+
+  @PluginFactory
+  public static PidFilePatternConverter newInstance() {
+    return new PidFilePatternConverter();
+  }
+
+  public void format(StringBuilder toAppendTo, Object... objects) {
+    toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName());
+  }
+
+  public void format(Object obj, StringBuilder toAppendTo) {
+    toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName());
+  }
+}

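For reference, the value the %pid converter appends is the JVM's RuntimeMXBean name, which on common JVMs has the form <pid>@<hostname> (the exact format is JVM-dependent):

  import java.lang.management.ManagementFactory;

  public class PidSketch {
    public static void main(String[] args) {
      // Prints something like 9577@localhost on HotSpot
      System.out.println(ManagementFactory.getRuntimeMXBean().getName());
    }
  }
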
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/main/resources/hive-exec-log4j.properties
----------------------------------------------------------------------
diff --git a/ql/src/main/resources/hive-exec-log4j.properties b/ql/src/main/resources/hive-exec-log4j.properties
deleted file mode 100644
index 9eaa6b6..0000000
--- a/ql/src/main/resources/hive-exec-log4j.properties
+++ /dev/null
@@ -1,77 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.log.threshold=ALL
-hive.root.logger=INFO,FA
-hive.log.dir=${java.io.tmpdir}/${user.name}
-hive.query.id=hadoop
-hive.log.file=${hive.query.id}.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=${hive.log.threshold}
-
-#
-# File Appender
-#
-
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.File=${hive.log.dir}/${hive.log.file}
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t] %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,FA
-log4j.category.Datastore=ERROR,FA
-log4j.category.Datastore.Schema=ERROR,FA
-log4j.category.JPOX.Datastore=ERROR,FA
-log4j.category.JPOX.Plugin=ERROR,FA
-log4j.category.JPOX.MetaData=ERROR,FA
-log4j.category.JPOX.Query=ERROR,FA
-log4j.category.JPOX.General=ERROR,FA
-log4j.category.JPOX.Enhancer=ERROR,FA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/main/resources/hive-exec-log4j2.xml
----------------------------------------------------------------------
diff --git a/ql/src/main/resources/hive-exec-log4j2.xml b/ql/src/main/resources/hive-exec-log4j2.xml
new file mode 100644
index 0000000..c93437c
--- /dev/null
+++ b/ql/src/main/resources/hive-exec-log4j2.xml
@@ -0,0 +1,110 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveExecLog4j2" packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">INFO</Property>
+    <Property name="hive.root.logger">FA</Property>
+    <Property name="hive.log.dir">${sys:java.io.tmpdir}/${sys:user.name}</Property>
+    <Property name="hive.query.id">hadoop</Property>
+    <Property name="hive.log.file">${sys:hive.query.id}.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Regular File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <File name="FA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+    </File>
+
+    <!-- Daily Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions -->
+    <!-- <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <DefaultRolloverStrategy max="30"/>
+    </RollingFile> -->
+
+    <!-- Size based Rolling File Appender -->
+    <!-- NOTE: if enabling multiple file appender make sure to use different file names -->
+    <!-- <RollingFile name="RFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%i">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n" />
+      <Policies>
+        <SizeBasedTriggeringPolicy size="256 MB" />
+      </Policies>
+      <DefaultRolloverStrategy max="10"/>
+    </RollingFile> -->
+
+    <!-- HiveEventCounter appender is loaded from Configuration packages attribute. Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter" />
+    </Root>
+
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.MetaData" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>

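The ${sys:...} lookups in this file resolve against JVM system properties, so the log location and the active appender can be redirected per process, provided the properties are set before Log4j2 initializes. A hedged sketch (property values illustrative):

  public class ExecLogConfigSketch {
    public static void main(String[] args) {
      // Must run before the first LogManager call triggers Log4j2 initialization
      System.setProperty("hive.log.threshold", "ALL");
      System.setProperty("hive.log.level", "INFO");
      System.setProperty("hive.log.dir", "/tmp/hive-demo");
      System.setProperty("hive.log.file", "demo.log");   // execution log: /tmp/hive-demo/demo.log
      System.setProperty("hive.root.logger", "console"); // route to the console appender instead of FA
      org.apache.logging.log4j.LogManager.getLogger(ExecLogConfigSketch.class).info("hello");
    }
  }
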
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java
new file mode 100644
index 0000000..bdd837e
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.hive.ql.metadata.StringAppender;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Tests for the custom Log4j2 appenders (StringAppender and HiveEventCounter).
+ */
+public class TestLog4j2Appenders {
+
+  @Before
+  public void setup() {
+    // programmatically set root logger level to INFO. By default if log4j2-test.xml is not
+    // available, the root logger will use ERROR log level
+    // available root logger will use ERROR log level
+    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+    Configuration config = ctx.getConfiguration();
+    LoggerConfig loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
+    loggerConfig.setLevel(Level.INFO);
+    ctx.updateLoggers();
+  }
+
+  @Test
+  public void testStringAppender() throws Exception {
+    // Get the RootLogger which, if you don't have log4j2-test.xml defined, will only log ERRORs
+    Logger logger = LogManager.getRootLogger();
+    // Create a String Appender to capture log output
+    StringAppender appender = StringAppender.createStringAppender("%m");
+    appender.addToLogger(logger.getName(), Level.INFO);
+    appender.start();
+
+    // Log to the string appender
+    logger.info("Hello!");
+    logger.info(" World");
+
+    assertEquals("Hello! World", appender.getOutput());
+    appender.removeFromLogger(LogManager.getRootLogger().getName());
+  }
+
+  @Test
+  public void testHiveEventCounterAppender() throws Exception {
+    Logger logger = LogManager.getRootLogger();
+    HiveEventCounter appender = HiveEventCounter.createInstance("EventCounter", true, null, null);
+    appender.addToLogger(logger.getName(), Level.INFO);
+    appender.start();
+
+    logger.info("Test");
+    logger.info("Test");
+    logger.info("Test");
+    logger.info("Test");
+
+    logger.error("Test");
+    logger.error("Test");
+    logger.error("Test");
+
+    logger.warn("Test");
+    logger.warn("Test");
+
+    logger.fatal("Test");
+
+    // HiveEventCounter will be loaded from hive-log4j2-test.xml before tests are run. The 2 log
+    // info msgs from previous test case will also be counted along with 4 log info msgs in this
+    // test and hence we assert for 6 here
+    assertEquals(6, appender.getInfo());
+    assertEquals(3, appender.getError());
+    assertEquals(2, appender.getWarn());
+    assertEquals(1, appender.getFatal());
+    appender.removeFromLogger(LogManager.getRootLogger().getName());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/test/org/apache/hadoop/hive/ql/metadata/StringAppender.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/StringAppender.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/StringAppender.java
new file mode 100644
index 0000000..17b64d6
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/StringAppender.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Serializable;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender;
+import org.apache.logging.log4j.core.appender.OutputStreamManager;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Log4j2 appender that writes to an in-memory string object.
+ */
+@Plugin(name = "StringAppender", category = "Core", elementType = "appender", printObject = true)
+public class StringAppender
+    extends AbstractOutputStreamAppender<StringAppender.StringOutputStreamManager> {
+
+  private static final String APPENDER_NAME = "StringAppender";
+  private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+  private static Configuration configuration = context.getConfiguration();
+  private StringOutputStreamManager manager;
+
+  /**
+   * Instantiate a WriterAppender and set the output destination to a
+   * new {@link OutputStreamWriter} initialized with <code>os</code>
+   * as its {@link OutputStream}.
+   *
+   * @param name             The name of the Appender.
+   * @param layout           The layout to format the message.
+   * @param filter
+   * @param ignoreExceptions
+   * @param immediateFlush
+   * @param manager          The OutputStreamManager.
+   */
+  protected StringAppender(String name,
+      Layout<? extends Serializable> layout, Filter filter,
+      boolean ignoreExceptions, boolean immediateFlush,
+      StringOutputStreamManager manager) {
+    super(name, layout, filter, ignoreExceptions, immediateFlush, manager);
+    this.manager = manager;
+  }
+
+  @PluginFactory
+  public static StringAppender createStringAppender(
+      @PluginAttribute("name") String nullablePatternString) {
+    PatternLayout layout;
+    if (nullablePatternString == null) {
+      layout = PatternLayout.createDefaultLayout();
+    } else {
+      layout = PatternLayout.createLayout(nullablePatternString, configuration,
+          null, null, true, false, null, null);
+    }
+
+    return new StringAppender(APPENDER_NAME, layout, null, false, true,
+        new StringOutputStreamManager(new ByteArrayOutputStream(), "StringStream", layout));
+  }
+
+  @VisibleForTesting
+  public void addToLogger(String loggerName, Level level) {
+    LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+    loggerConfig.addAppender(this, level, null);
+    context.updateLoggers();
+  }
+
+  @VisibleForTesting
+  public void removeFromLogger(String loggerName) {
+    LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+    loggerConfig.removeAppender(APPENDER_NAME);
+    context.updateLoggers();
+  }
+
+  public String getOutput() {
+    manager.flush();
+    return new String(manager.getStream().toByteArray());
+  }
+
+  public void reset() {
+    manager.reset();
+  }
+
+  protected static class StringOutputStreamManager extends OutputStreamManager {
+    ByteArrayOutputStream stream;
+
+    protected StringOutputStreamManager(ByteArrayOutputStream os, String streamName,
+        Layout<?> layout) {
+      super(os, streamName, layout);
+      stream = os;
+    }
+
+    public ByteArrayOutputStream getStream() {
+      return stream;
+    }
+
+    public void reset() {
+      stream.reset();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index 99fbd5d..1e2feaa 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.metadata;
 
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 
-import java.io.StringWriter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -29,8 +28,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -56,15 +53,19 @@ import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.junit.Assert;
 
 import com.google.common.collect.ImmutableMap;
 
+import junit.framework.TestCase;
+
 /**
  * TestHive.
  *
@@ -248,36 +249,39 @@ public class TestHive extends TestCase {
    * @throws Throwable
    */
   public void testMetaStoreApiTiming() throws Throwable {
-    // set log level to DEBUG, as this is logged at debug level
-    Logger logger = Logger.getLogger("hive.ql.metadata.Hive");
-    Level origLevel = logger.getLevel();
-    logger.setLevel(Level.DEBUG);
-
-    // create an appender to capture the logs in a string
-    StringWriter writer = new StringWriter();
-    WriterAppender appender = new WriterAppender(new PatternLayout(), writer);
+    // Get the logger, which will only log ERRORs unless log4j2-test.xml configures otherwise
+    Logger logger = LogManager.getLogger("hive.ql.metadata.Hive");
+    Level oldLevel = logger.getLevel();
+    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+    Configuration config = ctx.getConfiguration();
+    LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
+    loggerConfig.setLevel(Level.DEBUG);
+    ctx.updateLoggers();
+
+    // Create a String Appender to capture log output
+    StringAppender appender = StringAppender.createStringAppender("%m");
+    appender.addToLogger(logger.getName(), Level.DEBUG);
+    appender.start();
 
     try {
-      logger.addAppender(appender);
-
       hm.clearMetaCallTiming();
       hm.getAllDatabases();
       hm.dumpAndClearMetaCallTiming("test");
-      String logStr = writer.toString();
+      String logStr = appender.getOutput();
       String expectedString = "getAllDatabases_()=";
       Assert.assertTrue(logStr + " should contain <" + expectedString,
           logStr.contains(expectedString));
 
       // reset the log buffer, verify new dump without any api call does not contain func
-      writer.getBuffer().setLength(0);
+      appender.reset();
       hm.dumpAndClearMetaCallTiming("test");
-      logStr = writer.toString();
+      logStr = appender.getOutput();
       Assert.assertFalse(logStr + " should not contain <" + expectedString,
           logStr.contains(expectedString));
-
     } finally {
-      logger.setLevel(origLevel);
-      logger.removeAppender(appender);
+      loggerConfig.setLevel(oldLevel);
+      ctx.updateLoggers();
+      appender.removeFromLogger(logger.getName());
     }
   }
 

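The boilerplate above is the stock log4j2 replacement for log4j 1.x's logger.setLevel(): levels now live on the LoggerConfig inside the Configuration, and updateLoggers() pushes the change out to live loggers. Condensed into a helper, the idiom looks like this (a sketch, not code from this commit):

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;

public final class LogLevels {
  /** Change a logger's level programmatically under log4j2; returns the old level. */
  public static Level setLevel(String loggerName, Level newLevel) {
    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
    Configuration config = ctx.getConfiguration();
    // Caveat: with no explicit config entry for loggerName, this returns the
    // nearest ancestor config (possibly root), so the change applies more widely.
    LoggerConfig loggerConfig = config.getLoggerConfig(loggerName);
    Level oldLevel = loggerConfig.getLevel();
    loggerConfig.setLevel(newLevel);
    ctx.updateLoggers(); // propagate to live loggers
    return oldLevel;
  }
}
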
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java b/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
index 9d64b10..876ade8 100644
--- a/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
+++ b/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
@@ -18,9 +18,6 @@
 
 package org.apache.hive.service.cli;
 
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
-
 /**
  * CLIServiceUtils.
  *
@@ -29,10 +26,6 @@ public class CLIServiceUtils {
 
 
   private static final char SEARCH_STRING_ESCAPE = '\\';
-  public static final Layout verboseLayout = new PatternLayout(
-    "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n");
-  public static final Layout nonVerboseLayout = new PatternLayout(
-    "%-5p : %m%n");
 
   /**
    * Convert a SQL search pattern into an equivalent Java Regex.

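The two layouts removed here are recreated in LogDivertAppender (next file) via log4j2's PatternLayout.createLayout(...) factory. For reference, the builder API is a less positional way to build the same layouts; a sketch, assuming a log4j2 version that ships PatternLayout.newBuilder():

import org.apache.logging.log4j.core.layout.PatternLayout;

public final class Layouts {
  // Same patterns as the removed verboseLayout/nonVerboseLayout, rebuilt for log4j2.
  public static final PatternLayout VERBOSE = PatternLayout.newBuilder()
      .withPattern("%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n")
      .build();

  public static final PatternLayout NON_VERBOSE = PatternLayout.newBuilder()
      .withPattern("%-5p : %m%n")
      .build();
}
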
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
index 70340bd..fb3921f 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
@@ -6,44 +6,94 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hive.service.cli.operation;
-import java.io.CharArrayWriter;
-import java.util.Enumeration;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Serializable;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.session.OperationLog;
-import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
-import org.apache.hive.service.cli.CLIServiceUtils;
-import org.apache.log4j.Appender;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.Logger;
-import org.apache.log4j.WriterAppender;
-import org.apache.log4j.spi.Filter;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender;
+import org.apache.logging.log4j.core.appender.ConsoleAppender;
+import org.apache.logging.log4j.core.appender.OutputStreamManager;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.filter.AbstractFilter;
+import org.apache.logging.log4j.core.layout.PatternLayout;
 
 import com.google.common.base.Joiner;
 
 /**
- * An Appender to divert logs from individual threads to the LogObject they belong to.
+ * Divert appender to redirect operation logs to separate files.
  */
-public class LogDivertAppender extends WriterAppender {
-  private static final Logger LOG = Logger.getLogger(LogDivertAppender.class.getName());
+public class LogDivertAppender
+    extends AbstractOutputStreamAppender<LogDivertAppender.StringOutputStreamManager> {
+  private static final Logger LOG = LogManager.getLogger(LogDivertAppender.class.getName());
+  private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+  private static Configuration configuration = context.getConfiguration();
+  public static final Layout<? extends Serializable> verboseLayout = PatternLayout.createLayout(
+      "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n", configuration, null, null, true, false, null, null);
+  public static final Layout<? extends Serializable> nonVerboseLayout = PatternLayout.createLayout(
+      "%-5p : %m%n", configuration, null, null, true, false, null, null);
+
   private final OperationManager operationManager;
+  private StringOutputStreamManager manager;
   private boolean isVerbose;
-  private Layout verboseLayout;
+  private final Layout<? extends Serializable> layout;
+
+  /**
+   * Instantiate a LogDivertAppender and set the output destination to a
+   * new {@link OutputStreamWriter} initialized with <code>os</code>
+   * as its {@link OutputStream}.
+   *
+   * @param name             The name of the Appender.
+   * @param filter           The filter to apply.
+   * @param manager          The OutputStreamManager.
+   * @param operationManager The operation manager whose logs are diverted.
+   * @param loggingMode      The operation log verbosity.
+   */
+  protected LogDivertAppender(String name, Filter filter,
+      StringOutputStreamManager manager, OperationManager operationManager,
+      OperationLog.LoggingLevel loggingMode) {
+    super(name, null, filter, false, true, manager);
+    this.operationManager = operationManager;
+    this.manager = manager;
+    this.isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
+    this.layout = getDefaultLayout();
+  }
+
+  public Layout<? extends Serializable> getDefaultLayout() {
+    // There should be a ConsoleAppender. Copy its Layout.
+    Logger root = LogManager.getRootLogger();
+    Layout layout = null;
+
+    for (Appender ap : ((org.apache.logging.log4j.core.Logger) root).getAppenders().values()) {
+      if (ap.getClass().equals(ConsoleAppender.class)) {
+        layout = ap.getLayout();
+        break;
+      }
+    }
+
+    return layout;
+  }
 
   /**
   * A log filter that filters messages coming from the logger with the given names.
@@ -52,31 +102,31 @@ public class LogDivertAppender extends WriterAppender {
    * they don't generate more logs for themselves when they process logs.
    * White list filter is used for less verbose log collection
    */
-  private static class NameFilter extends Filter {
+  private static class NameFilter extends AbstractFilter {
     private Pattern namePattern;
-    private LoggingLevel loggingMode;
+    private OperationLog.LoggingLevel loggingMode;
     private OperationManager operationManager;
 
     /* Patterns that are excluded in verbose logging level.
     * Filter out messages coming from log processing classes, or we'll run an infinite loop.
      */
     private static final Pattern verboseExcludeNamePattern = Pattern.compile(Joiner.on("|").
-      join(new String[] {LOG.getName(), OperationLog.class.getName(),
-      OperationManager.class.getName()}));
+        join(new String[]{LOG.getName(), OperationLog.class.getName(),
+            OperationManager.class.getName()}));
 
     /* Patterns that are included in execution logging level.
      * In execution mode, show only select logger messages.
      */
     private static final Pattern executionIncludeNamePattern = Pattern.compile(Joiner.on("|").
-      join(new String[] {"org.apache.hadoop.mapreduce.JobSubmitter",
-      "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(),
-      "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"}));
+        join(new String[]{"org.apache.hadoop.mapreduce.JobSubmitter",
+            "org.apache.hadoop.mapreduce.Job", "SessionState", 
Task.class.getName(),
+            "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"}));
 
     /* Patterns that are included in performance logging level.
      * In performance mode, show execution and performance logger messages.
      */
     private static final Pattern performanceIncludeNamePattern = Pattern.compile(
-      executionIncludeNamePattern.pattern() + "|" + PerfLogger.class.getName());
+        executionIncludeNamePattern.pattern() + "|" + PerfLogger.class.getName());
 
     private void setCurrentNamePattern(OperationLog.LoggingLevel mode) {
       if (mode == OperationLog.LoggingLevel.VERBOSE) {
@@ -88,26 +138,25 @@ public class LogDivertAppender extends WriterAppender {
       }
     }
 
-    public NameFilter(
-      OperationLog.LoggingLevel loggingMode, OperationManager op) {
+    public NameFilter(OperationLog.LoggingLevel loggingMode, OperationManager op) {
       this.operationManager = op;
       this.loggingMode = loggingMode;
       setCurrentNamePattern(loggingMode);
     }
 
     @Override
-    public int decide(LoggingEvent ev) {
+    public Result filter(LogEvent event) {
       OperationLog log = operationManager.getOperationLogByThread();
       boolean excludeMatches = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
 
       if (log == null) {
-        return Filter.DENY;
+        return Result.DENY;
       }
 
       OperationLog.LoggingLevel currentLoggingMode = log.getOpLoggingLevel();
       // If logging is disabled, deny everything.
       if (currentLoggingMode == OperationLog.LoggingLevel.NONE) {
-        return Filter.DENY;
+        return Result.DENY;
       }
       // Look at the current session's setting
       // and set the pattern and excludeMatches accordingly.
@@ -116,88 +165,58 @@ public class LogDivertAppender extends WriterAppender {
         setCurrentNamePattern(loggingMode);
       }
 
-      boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches();
+      boolean isMatch = namePattern.matcher(event.getLoggerName()).matches();
 
       if (excludeMatches == isMatch) {
         // Deny if this is black-list filter (excludeMatches = true) and it
-        // matched
-        // or if this is whitelist filter and it didn't match
-        return Filter.DENY;
+        // matched or if this is whitelist filter and it didn't match
+        return Result.DENY;
       }
-      return Filter.NEUTRAL;
+      return Result.NEUTRAL;
     }
   }
 
-  /** This is where the log message will go to */
-  private final CharArrayWriter writer = new CharArrayWriter();
-
-  private void setLayout (boolean isVerbose, Layout lo) {
-    if (isVerbose) {
-      if (lo == null) {
-        lo = CLIServiceUtils.verboseLayout;
-        LOG.info("Cannot find a Layout from a ConsoleAppender. Using default 
Layout pattern.");
-      }
-    } else {
-      lo = CLIServiceUtils.nonVerboseLayout;
-    }
-    setLayout(lo);
+  public static LogDivertAppender createInstance(OperationManager operationManager,
+      OperationLog.LoggingLevel loggingMode) {
+    return new LogDivertAppender("LogDivertAppender", new NameFilter(loggingMode, operationManager),
+        new StringOutputStreamManager(new ByteArrayOutputStream(), "StringStream", null),
+        operationManager, loggingMode);
   }
 
-  private void initLayout(boolean isVerbose) {
-    // There should be a ConsoleAppender. Copy its Layout.
-    Logger root = Logger.getRootLogger();
-    Layout layout = null;
-
-    Enumeration<?> appenders = root.getAllAppenders();
-    while (appenders.hasMoreElements()) {
-      Appender ap = (Appender) appenders.nextElement();
-      if (ap.getClass().equals(ConsoleAppender.class)) {
-        layout = ap.getLayout();
-        break;
-      }
-    }
-    setLayout(isVerbose, layout);
+  public String getOutput() {
+    return new String(manager.getStream().toByteArray());
   }
 
-  public LogDivertAppender(OperationManager operationManager,
-    OperationLog.LoggingLevel loggingMode) {
-    isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
-    initLayout(isVerbose);
-    setWriter(writer);
-    setName("LogDivertAppender");
-    this.operationManager = operationManager;
-    this.verboseLayout = isVerbose ? layout : CLIServiceUtils.verboseLayout;
-    addFilter(new NameFilter(loggingMode, operationManager));
+  @Override
+  public void start() {
+    super.start();
   }
 
   @Override
-  public void doAppend(LoggingEvent event) {
-    OperationLog log = operationManager.getOperationLogByThread();
+  public Layout<? extends Serializable> getLayout() {
 
-    // Set current layout depending on the verbose/non-verbose mode.
+    // If there is a logging level change from verbose->non-verbose or vice-versa since
+    // the last subAppend call, change the layout to preserve consistency.
+    OperationLog log = operationManager.getOperationLogByThread();
     if (log != null) {
-      boolean isCurrModeVerbose = (log.getOpLoggingLevel() == OperationLog.LoggingLevel.VERBOSE);
+      isVerbose = (log.getOpLoggingLevel() == OperationLog.LoggingLevel.VERBOSE);
+    }
 
-      // If there is a logging level change from verbose->non-verbose or vice-versa since
-      // the last subAppend call, change the layout to preserve consistency.
-      if (isCurrModeVerbose != isVerbose) {
-        isVerbose = isCurrModeVerbose;
-        setLayout(isVerbose, verboseLayout);
-      }
+    // layout is immutable in log4j2, so we cheat here and return a different layout when
+    // verbosity changes
+    if (isVerbose) {
+      return verboseLayout;
+    } else {
+      return layout == null ? nonVerboseLayout : layout;
     }
-    super.doAppend(event);
   }
 
-  /**
-   * Overrides WriterAppender.subAppend(), which does the real logging. No need
-   * to worry about concurrency since log4j calls this synchronously.
-   */
   @Override
-  protected void subAppend(LoggingEvent event) {
-    super.subAppend(event);
-    // That should've gone into our writer. Notify the LogContext.
-    String logOutput = writer.toString();
-    writer.reset();
+  public void append(LogEvent event) {
+    super.append(event);
+
+    String logOutput = getOutput();
+    manager.reset();
 
     OperationLog log = operationManager.getOperationLogByThread();
     if (log == null) {
@@ -206,4 +225,22 @@ public class LogDivertAppender extends WriterAppender {
     }
     log.writeOperationLog(logOutput);
   }
+
+  protected static class StringOutputStreamManager extends OutputStreamManager {
+    ByteArrayOutputStream stream;
+
+    protected StringOutputStreamManager(ByteArrayOutputStream os, String streamName,
+        Layout<?> layout) {
+      super(os, streamName, layout);
+      stream = os;
+    }
+
+    public ByteArrayOutputStream getStream() {
+      return stream;
+    }
+
+    public void reset() {
+      stream.reset();
+    }
+  }
 }

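The NameFilter port above shows the general shape of a log4j 1.x to 2.x filter migration: decide(LoggingEvent) returning int constants becomes filter(LogEvent) returning the Result enum, with AbstractFilter supplying defaults for the remaining overloads. A minimal standalone filter, with an illustrative class name and predicate (not part of this commit):

import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.filter.AbstractFilter;

public class PrefixFilter extends AbstractFilter {
  private final String prefix;

  public PrefixFilter(String prefix) {
    this.prefix = prefix;
  }

  @Override
  public Result filter(LogEvent event) {
    // DENY drops the event; NEUTRAL defers to levels and any later filters.
    return event.getLoggerName().startsWith(prefix) ? Result.NEUTRAL : Result.DENY;
  }
}
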
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
index 9b0a519..304a525 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -41,8 +41,11 @@ import org.apache.hive.service.cli.RowSet;
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
 
 /**
  * OperationManager.
@@ -50,7 +53,6 @@ import org.apache.log4j.Logger;
  */
 public class OperationManager extends AbstractService {
   private final Log LOG = LogFactory.getLog(OperationManager.class.getName());
-
   private final Map<OperationHandle, Operation> handleToOperation =
       new HashMap<OperationHandle, Operation>();
 
@@ -83,8 +85,13 @@ public class OperationManager extends AbstractService {
 
   private void initOperationLogCapture(String loggingMode) {
     // Register another Appender (with the same layout) that talks to us.
-    Appender ap = new LogDivertAppender(this, OperationLog.getLoggingLevel(loggingMode));
-    Logger.getRootLogger().addAppender(ap);
+    Appender ap = LogDivertAppender.createInstance(this, OperationLog.getLoggingLevel(loggingMode));
+    LoggerContext context = (LoggerContext) LogManager.getContext(false);
+    Configuration configuration = context.getConfiguration();
+    LoggerConfig loggerConfig = configuration.getLoggerConfig(LogManager.getLogger().getName());
+    loggerConfig.addAppender(ap, null, null);
+    context.updateLoggers();
+    ap.start();
   }
 
   public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,

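Note that configuration.getLoggerConfig(LogManager.getLogger().getName()) reaches the root LoggerConfig only by ancestor fallback, since OperationManager has no explicit logger config. Addressing root directly is more explicit; a sketch of the same registration (not code from this commit):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;

public final class RootAppenders {
  /** Attach an appender to the root logger, log4j2-style. */
  public static void addToRoot(Appender ap) {
    LoggerContext context = (LoggerContext) LogManager.getContext(false);
    Configuration configuration = context.getConfiguration();
    LoggerConfig root = configuration.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
    ap.start();                        // log4j2 appenders must be started before use
    root.addAppender(ap, null, null);  // null level/filter: accept what the logger delivers
    context.updateLoggers();
  }
}
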
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/shims/common/pom.xml
----------------------------------------------------------------------
diff --git a/shims/common/pom.xml b/shims/common/pom.xml
index 9e9a3b7..dfdec2b 100644
--- a/shims/common/pom.xml
+++ b/shims/common/pom.xml
@@ -41,14 +41,19 @@
       <version>${commons-logging.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>${log4j.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>apache-log4j-extras</artifactId>
-      <version>${log4j-extras.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-jcl</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>

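Taken together, these swaps keep every legacy call path working on the new backend: log4j-1.2-api lets existing org.apache.log4j call sites run against log4j2, log4j-slf4j-impl binds SLF4J to log4j2, and log4j-jcl does the same for commons-logging (still used via LogFactory in classes such as OperationManager above).
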
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/shims/common/src/main/java/org/apache/hadoop/hive/shims/HiveEventCounter.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HiveEventCounter.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HiveEventCounter.java
deleted file mode 100644
index 224b135..0000000
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HiveEventCounter.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.shims;
-
-import org.apache.log4j.Appender;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Layout;
-import org.apache.log4j.spi.ErrorHandler;
-import org.apache.log4j.spi.Filter;
-import org.apache.log4j.spi.LoggingEvent;
-import org.apache.log4j.spi.OptionHandler;
-
-public class HiveEventCounter implements Appender, OptionHandler {
-
-  AppenderSkeleton hadoopEventCounter;
-
-  public HiveEventCounter() {
-    hadoopEventCounter = ShimLoader.getEventCounter();
-  }
-
-  @Override
-  public void close() {
-    hadoopEventCounter.close();
-  }
-
-  @Override
-  public boolean requiresLayout() {
-    return hadoopEventCounter.requiresLayout();
-  }
-
-  @Override
-  public void addFilter(Filter filter) {
-    hadoopEventCounter.addFilter(filter);
-  }
-
-  @Override
-  public void clearFilters() {
-    hadoopEventCounter.clearFilters();
-  }
-
-  @Override
-  public void doAppend(LoggingEvent event) {
-    hadoopEventCounter.doAppend(event);
-  }
-
-  @Override
-  public ErrorHandler getErrorHandler() {
-    return hadoopEventCounter.getErrorHandler();
-  }
-
-  @Override
-  public Filter getFilter() {
-    return hadoopEventCounter.getFilter();
-  }
-
-  @Override
-  public Layout getLayout() {
-    return hadoopEventCounter.getLayout();
-  }
-
-  @Override
-  public String getName() {
-    return hadoopEventCounter.getName();
-  }
-
-  @Override
-  public void setErrorHandler(ErrorHandler handler) {
-    hadoopEventCounter.setErrorHandler(handler);
-  }
-
-  @Override
-  public void setLayout(Layout layout) {
-    hadoopEventCounter.setLayout(layout);
-  }
-
-  @Override
-  public void setName(String name) {
-    hadoopEventCounter.setName(name);
-  }
-
-  @Override
-  public void activateOptions() {
-    hadoopEventCounter.activateOptions();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/spark-client/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/spark-client/src/test/resources/log4j.properties b/spark-client/src/test/resources/log4j.properties
deleted file mode 100644
index 93a60cc..0000000
--- a/spark-client/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootCategory=DEBUG, console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/spark-client/src/test/resources/log4j2.xml
----------------------------------------------------------------------
diff --git a/spark-client/src/test/resources/log4j2.xml b/spark-client/src/test/resources/log4j2.xml
new file mode 100644
index 0000000..a435069
--- /dev/null
+++ b/spark-client/src/test/resources/log4j2.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="SparkClientLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="spark.log.level">DEBUG</Property>
+    <Property name="spark.root.logger">console</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n"/>
+    </Console>
+  </Appenders>
+
+  <Loggers>
+    <Root level="DEBUG">
+      <AppenderRef ref="${sys:spark.root.logger}" level="${sys:spark.log.level}"/>
+    </Root>
+  </Loggers>
+
+</Configuration>

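The ${sys:...} references here resolve from JVM system properties first, falling back to the <Properties> defaults above, so a run can override the target or level with -Dspark.root.logger=... or -Dspark.log.level=... (assuming log4j2's lookup fallback from a missed sys: prefix to configuration properties).
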
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/storage-api/pom.xml
----------------------------------------------------------------------
diff --git a/storage-api/pom.xml b/storage-api/pom.xml
index 71b51b8..71b79f1 100644
--- a/storage-api/pom.xml
+++ b/storage-api/pom.xml
@@ -32,13 +32,6 @@
   </properties>
 
   <dependencies>
-    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
-    <!-- inter-project -->
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>${log4j.version}</version>
-    </dependency>
     <!-- test inter-project -->
     <dependency>
       <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/testutils/ptest2/pom.xml
----------------------------------------------------------------------
diff --git a/testutils/ptest2/pom.xml b/testutils/ptest2/pom.xml
index 211678e..2cf7f45 100644
--- a/testutils/ptest2/pom.xml
+++ b/testutils/ptest2/pom.xml
@@ -64,6 +64,26 @@ limitations under the License.
       <version>15.0</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-web</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-jcl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
       <version>1.2.17</version>

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/testutils/ptest2/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/resources/log4j.properties b/testutils/ptest2/src/main/resources/log4j.properties
deleted file mode 100644
index edb9696..0000000
--- a/testutils/ptest2/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-hive.ptest.logdir=target
-
-log4j.rootLogger=DEBUG,FILE
-log4j.threshhold=ALL
-
-log4j.appender.FILE=org.apache.log4j.RollingFileAppender
-log4j.appender.FILE.File=${hive.ptest.logdir}/ptest.log
-log4j.appender.FILE.MaxFileSize=50MB
-log4j.appender.FILE.MaxBackupIndex=1
-log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
-log4j.appender.FILE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
-
-log4j.logger.org.apache.http=INFO
-log4j.logger.org.springframework=INFO
-log4j.logger.org.jclouds=INFO
-log4j.logger.jclouds=INFO
-log4j.logger.org.apache.hive=DEBUG
-log4j.logger.org.apache.http=TRACE
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN
