HIVE-18224: Introduce interface above driver (Zoltan Haindrich, reviewed by 
Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich <k...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0e2dab91
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0e2dab91
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0e2dab91

Branch: refs/heads/master
Commit: 0e2dab913989a2ccc4fde1f2ff7dfea097f4d493
Parents: 3e930bf
Author: Zoltan Haindrich <k...@rxd.hu>
Authored: Fri Dec 22 09:37:52 2017 +0100
Committer: Zoltan Haindrich <k...@rxd.hu>
Committed: Fri Dec 22 09:38:53 2017 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/cli/CliDriver.java   | 10 +--
 .../hadoop/hive/cli/TestCliDriverMethods.java   |  4 +-
 .../apache/hive/hcatalog/cli/HCatDriver.java    | 26 ++++++--
 .../hive/hcatalog/cli/TestSemanticAnalysis.java |  7 +-
 .../hive/hcatalog/cli/TestUseDatabase.java      |  7 +-
 .../hive/hcatalog/data/HCatDataCheckUtil.java   | 13 ++--
 .../hive/hcatalog/mapreduce/HCatBaseTest.java   |  7 +-
 .../hcatalog/mapreduce/TestPassProperties.java  |  7 +-
 .../hcatalog/pig/AbstractHCatLoaderTest.java    | 10 +--
 .../hive/hcatalog/pig/TestE2EScenarios.java     |  7 +-
 .../pig/TestHCatLoaderComplexSchema.java        |  7 +-
 .../hcatalog/pig/TestHCatLoaderEncryption.java  | 15 +++--
 .../hive/hcatalog/pig/TestHCatStorerMulti.java  |  7 +-
 .../hcatalog/listener/TestMsgBusConnection.java |  7 +-
 .../listener/TestNotificationListener.java      |  4 +-
 .../hive/hcatalog/streaming/HiveEndPoint.java   |  7 +-
 .../hive/hcatalog/streaming/TestStreaming.java  | 17 ++---
 .../api/repl/commands/TestCommands.java         |  7 +-
 .../hbase/TestPigHBaseStorageHandler.java       |  7 +-
 .../listener/TestDbNotificationListener.java    |  7 +-
 .../mapreduce/TestSequenceFileReadWrite.java    |  7 +-
 .../hadoop/hive/metastore/TestFilterHooks.java  | 13 ++--
 .../hive/metastore/TestMarkPartition.java       |  7 +-
 .../metastore/TestMetaStoreAuthorization.java   |  5 +-
 .../TestMetaStoreEndFunctionListener.java       | 13 ++--
 .../metastore/TestMetaStoreEventListener.java   | 16 ++---
 .../TestMetaStoreEventListenerOnlyOnCommit.java | 13 ++--
 .../metastore/TestMetaStoreInitListener.java    | 13 ++--
 .../metastore/TestMetaStoreListenersError.java  |  6 +-
 .../hive/metastore/TestMetaStoreMetrics.java    |  9 ++-
 .../hive/metastore/TestMetastoreVersion.java    | 30 ++++-----
 .../apache/hadoop/hive/ql/TestAcidOnTez.java    |  8 +--
 .../hadoop/hive/ql/TestCreateUdfEntities.java   |  6 +-
 ...estDDLWithRemoteMetastoreSecondNamenode.java |  4 +-
 .../hadoop/hive/ql/history/TestHiveHistory.java |  5 +-
 .../TestSemanticAnalyzerHookLoading.java        |  5 +-
 .../hive/ql/parse/TestReplicationScenarios.java | 41 ++++++------
 .../hadoop/hive/ql/parse/WarehouseInstance.java |  7 +-
 .../security/StorageBasedMetastoreTestBase.java |  7 +-
 .../TestAuthorizationPreEventListener.java      |  7 +-
 .../TestClientSideAuthorizationProvider.java    |  7 +-
 .../TestMetastoreAuthorizationProvider.java     |  7 +-
 .../TestMultiAuthorizationPreEventListener.java |  7 +-
 .../TestHiveAuthorizerCheckInvocation.java      |  7 +-
 .../plugin/TestHiveAuthorizerShowFilters.java   |  7 +-
 .../hive/ql/txn/compactor/TestCompactor.java    | 11 ++--
 .../org/apache/hadoop/hive/ql/QTestUtil.java    | 13 ++--
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 39 +++++++----
 .../apache/hadoop/hive/ql/DriverFactory.java    | 62 ++++++++++++++++++
 .../java/org/apache/hadoop/hive/ql/IDriver.java | 69 ++++++++++++++++++++
 .../hadoop/hive/ql/QueryLifeTimeHookRunner.java |  2 +-
 .../apache/hadoop/hive/ql/exec/DagUtils.java    |  9 +--
 .../apache/hadoop/hive/ql/hooks/HookUtils.java  |  2 +-
 .../hadoop/hive/ql/hooks/HooksLoader.java       | 11 ++--
 .../ql/processors/CommandProcessorFactory.java  | 11 ++--
 .../service/cli/operation/SQLOperation.java     |  7 +-
 .../service/cli/session/SessionManager.java     |  3 +-
 57 files changed, 440 insertions(+), 239 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
----------------------------------------------------------------------
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java 
b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index bd0b422..2cd3c95 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -67,7 +67,7 @@ import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
@@ -182,7 +182,7 @@ public class CliDriver {
     }  else { // local mode
       try {
         CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) 
conf);
-        if (proc instanceof Driver) {
+        if (proc instanceof IDriver) {
           // Let Driver strip comments using sql parser
           ret = processLocalCmd(cmd, proc, ss);
         } else {
@@ -227,8 +227,8 @@ public class CliDriver {
       try {
         needRetry = false;
         if (proc != null) {
-          if (proc instanceof Driver) {
-            Driver qp = (Driver) proc;
+          if (proc instanceof IDriver) {
+            IDriver qp = (IDriver) proc;
             PrintStream out = ss.out;
             long start = System.currentTimeMillis();
             if (ss.getIsVerbose()) {
@@ -321,7 +321,7 @@ public class CliDriver {
    * @param qp Driver that executed the command
    * @param out PrintStream which to send output to
    */
-  private void printHeader(Driver qp, PrintStream out) {
+  private void printHeader(IDriver qp, PrintStream out) {
     List<FieldSchema> fieldSchemas = qp.getSchema().getFieldSchemas();
     if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER)
           && fieldSchemas != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
----------------------------------------------------------------------
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java 
b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index 8f1c15e..0f7a1ee 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -54,7 +54,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.util.Shell;
 
@@ -166,7 +166,7 @@ public class TestCliDriverMethods extends TestCase {
         .thenReturn(true);
     cliDriver.setConf(conf);
 
-    Driver proc = mock(Driver.class);
+    IDriver proc = mock(IDriver.class);
 
     CommandProcessorResponse cpr = mock(CommandProcessorResponse.class);
     when(cpr.getResponseCode()).thenReturn(0);

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
index 4873595..e41db34 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
@@ -18,13 +18,17 @@
  */
 package org.apache.hive.hcatalog.cli;
 
+import java.io.IOException;
+import java.util.ArrayList;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -32,14 +36,19 @@ import 
org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.HCatConstants;
 
-public class HCatDriver extends Driver {
+public class HCatDriver {
+
+  private IDriver driver;
+
+  public HCatDriver() {
+    driver = DriverFactory.newDriver();
+  }
 
-  @Override
   public CommandProcessorResponse run(String command) {
 
     CommandProcessorResponse cpr = null;
     try {
-      cpr = super.run(command);
+      cpr = driver.run(command);
     } catch (CommandNeedRetryException e) {
       return new CommandProcessorResponse(-1, e.toString(), "");
     }
@@ -139,4 +148,13 @@ public class HCatDriver extends Driver {
       }
     }
   }
+
+  public int close() {
+    return driver.close();
+  }
+
+  public boolean getResults(ArrayList<String> res) throws IOException, 
CommandNeedRetryException {
+    return driver.getResults(res);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
index d249d43..110c57e 100644
--- 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
+++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
@@ -27,7 +27,8 @@ import 
org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
@@ -57,7 +58,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   private static final Logger LOG = 
LoggerFactory.getLogger(TestSemanticAnalysis.class);
   private static final String TBL_NAME = "junit_sem_analysis";
 
-  private Driver hcatDriver = null;
+  private IDriver hcatDriver = null;
   private String query;
 
   @Before
@@ -72,7 +73,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
       hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
           HCatSemanticAnalyzer.class.getName());
       
hcatConf.setBoolVar(HiveConf.ConfVars.METASTORE_DISALLOW_INCOMPATIBLE_COL_TYPE_CHANGES,
 false);
-      hcatDriver = new Driver(hcatConf);
+      hcatDriver = DriverFactory.newDriver(hcatConf);
       SessionState.start(new CliSessionState(hcatConf));
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
index 1606982..8bdb210 100644
--- 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
+++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
@@ -27,7 +27,8 @@ import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
@@ -35,7 +36,7 @@ import 
org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 /* Unit test for GitHub Howl issue #3 */
 public class TestUseDatabase extends TestCase {
 
-  private Driver hcatDriver;
+  private IDriver hcatDriver;
 
   @Override
   protected void setUp() throws Exception {
@@ -46,7 +47,7 @@ public class TestUseDatabase extends TestCase {
     hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
 
     hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, 
HCatSemanticAnalyzer.class.getName());
-    hcatDriver = new Driver(hcatConf);
+    hcatDriver = DriverFactory.newDriver(hcatConf);
     SessionState.start(new CliSessionState(hcatConf));
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
index ff56234..9eb3b7a 100644
--- 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
+++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
@@ -27,7 +27,8 @@ import java.util.Map.Entry;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.MiniCluster;
 import org.slf4j.Logger;
@@ -40,7 +41,7 @@ public class HCatDataCheckUtil {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(HCatDataCheckUtil.class);
 
-  public static Driver instantiateDriver(MiniCluster cluster) {
+  public static IDriver instantiateDriver(MiniCluster cluster) {
     HiveConf hiveConf = new HiveConf(HCatDataCheckUtil.class);
     for (Entry e : cluster.getProperties().entrySet()) {
       hiveConf.set(e.getKey().toString(), e.getValue().toString());
@@ -50,7 +51,7 @@ public class HCatDataCheckUtil {
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
 
     LOG.debug("Hive conf : {}", hiveConf.getAllProperties());
-    Driver driver = new Driver(hiveConf);
+    IDriver driver = DriverFactory.newDriver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
     return driver;
   }
@@ -64,7 +65,7 @@ public class HCatDataCheckUtil {
     MiniCluster.createInputFile(cluster, fileName, input);
   }
 
-  public static void createTable(Driver driver, String tableName, String 
createTableArgs)
+  public static void createTable(IDriver driver, String tableName, String 
createTableArgs)
     throws CommandNeedRetryException, IOException {
     String createTable = "create table " + tableName + createTableArgs;
     int retCode = driver.run(createTable).getResponseCode();
@@ -73,11 +74,11 @@ public class HCatDataCheckUtil {
     }
   }
 
-  public static void dropTable(Driver driver, String tablename) throws 
IOException, CommandNeedRetryException {
+  public static void dropTable(IDriver driver, String tablename) throws 
IOException, CommandNeedRetryException {
     driver.run("drop table if exists " + tablename);
   }
 
-  public static ArrayList<String> formattedRun(Driver driver, String name, 
String selectCmd)
+  public static ArrayList<String> formattedRun(IDriver driver, String name, 
String selectCmd)
     throws CommandNeedRetryException, IOException {
     driver.run(selectCmd);
     ArrayList<String> src_values = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
index f78eb15..616391f 100644
--- 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
+++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
@@ -23,7 +23,8 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.pig.ExecType;
@@ -49,7 +50,7 @@ public abstract class HCatBaseTest {
   protected static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + 
"/warehouse";
 
   protected HiveConf hiveConf = null;
-  protected Driver driver = null;
+  protected IDriver driver = null;
   protected HiveMetaStoreClient client = null;
 
   @BeforeClass
@@ -66,7 +67,7 @@ public abstract class HCatBaseTest {
   public void setUp() throws Exception {
     if (driver == null) {
       setUpHiveConf();
-      driver = new Driver(hiveConf);
+      driver = DriverFactory.newDriver(hiveConf);
       client = new HiveMetaStoreClient(hiveConf);
       SessionState.start(new CliSessionState(hiveConf));
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
index 31857bf..6d47b83 100644
--- 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
+++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
@@ -30,7 +30,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
@@ -53,7 +54,7 @@ public class TestPassProperties {
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + 
"/warehouse";
   private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
-  private static Driver driver;
+  private static IDriver driver;
   private static PigServer server;
   private static String[] input;
   private static HiveConf hiveConf;
@@ -67,7 +68,7 @@ public class TestPassProperties {
     hiveConf
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 
     new File(TEST_WAREHOUSE_DIR).mkdirs();

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
index 124a5bc..265b0fe 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
@@ -43,7 +43,7 @@ import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.Job;
@@ -96,7 +96,7 @@ public abstract class AbstractHCatLoaderTest extends 
HCatBaseTest {
     dropTable(tablename, driver);
   }
 
-  static void dropTable(String tablename, Driver driver) throws IOException, 
CommandNeedRetryException {
+  static void dropTable(String tablename, IDriver driver) throws IOException, 
CommandNeedRetryException {
     driver.run("drop table if exists " + tablename);
   }
 
@@ -104,7 +104,7 @@ public abstract class AbstractHCatLoaderTest extends 
HCatBaseTest {
     createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  static void createTable(String tablename, String schema, String 
partitionedBy, Driver driver, String storageFormat)
+  static void createTable(String tablename, String schema, String 
partitionedBy, IDriver driver, String storageFormat)
       throws IOException, CommandNeedRetryException {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
@@ -125,7 +125,7 @@ public abstract class AbstractHCatLoaderTest extends 
HCatBaseTest {
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
    */
-  static void executeStatementOnDriver(String cmd, Driver driver) throws 
IOException, CommandNeedRetryException {
+  static void executeStatementOnDriver(String cmd, IDriver driver) throws 
IOException, CommandNeedRetryException {
     LOG.debug("Executing: " + cmd);
     CommandProcessorResponse cpr = driver.run(cmd);
     if(cpr.getResponseCode() != 0) {
@@ -725,7 +725,7 @@ public abstract class AbstractHCatLoaderTest extends 
HCatBaseTest {
       }
       assertTrue("Expected " + primitiveRows.length + "; found " + 
numTuplesRead, numTuplesRead == primitiveRows.length);
     }
-    private static void setupAllTypesTable(Driver driver) throws Exception {
+    private static void setupAllTypesTable(IDriver driver) throws Exception {
       String[] primitiveData = new String[primitiveRows.length];
       for (int i = 0; i < primitiveRows.length; i++) {
         Object[] rowData = primitiveRows[i];

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
index 95a8164..6f372c9 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
@@ -30,7 +30,8 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -67,7 +68,7 @@ public class TestE2EScenarios {
 
   private static final String TEXTFILE_LOCN = TEST_DATA_DIR + "/textfile";
 
-  private static Driver driver;
+  private static IDriver driver;
 
   protected String storageFormat() {
     return "orc";
@@ -91,7 +92,7 @@ public class TestE2EScenarios {
     hiveConf
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
index 5c9bae4..5c6626d 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
@@ -32,7 +32,8 @@ import java.util.Set;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -67,7 +68,7 @@ import static org.junit.Assume.assumeTrue;
 public class TestHCatLoaderComplexSchema {
 
   //private static MiniCluster cluster = MiniCluster.buildCluster();
-  private static Driver driver;
+  private static IDriver driver;
   //private static Properties props;
   private static final Logger LOG = 
LoggerFactory.getLogger(TestHCatLoaderComplexSchema.class);
 
@@ -114,7 +115,7 @@ public class TestHCatLoaderComplexSchema {
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
     //props = new Properties();
     //props.setProperty("fs.default.name", 
cluster.getProperties().getProperty("fs.default.name"));

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index 496f3c8..d1f46e4 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -28,7 +28,8 @@ import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
@@ -93,7 +94,7 @@ public class TestHCatLoaderEncryption {
   private HadoopShims.MiniDFSShim dfs = null;
   private HadoopShims.HdfsEncryptionShim hes = null;
   private final String[] testOnlyCommands = new String[]{"crypto"};
-  private Driver driver;
+  private IDriver driver;
   private Map<Integer, Pair<Integer, String>> basicInputData;
   private static List<HCatRecord> readRecords = new ArrayList<HCatRecord>();
 
@@ -115,7 +116,7 @@ public class TestHCatLoaderEncryption {
     dropTable(tablename, driver);
   }
 
-  static void dropTable(String tablename, Driver driver) throws IOException, 
CommandNeedRetryException {
+  static void dropTable(String tablename, IDriver driver) throws IOException, 
CommandNeedRetryException {
     driver.run("drop table if exists " + tablename);
   }
 
@@ -123,7 +124,7 @@ public class TestHCatLoaderEncryption {
     createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  static void createTable(String tablename, String schema, String 
partitionedBy, Driver driver, String storageFormat)
+  static void createTable(String tablename, String schema, String 
partitionedBy, IDriver driver, String storageFormat)
       throws IOException, CommandNeedRetryException {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
@@ -142,7 +143,7 @@ public class TestHCatLoaderEncryption {
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
    */
-  static void executeStatementOnDriver(String cmd, Driver driver) throws 
IOException, CommandNeedRetryException {
+  static void executeStatementOnDriver(String cmd, IDriver driver) throws 
IOException, CommandNeedRetryException {
     LOG.debug("Executing: " + cmd);
     CommandProcessorResponse cpr = driver.run(cmd);
     if(cpr.getResponseCode() != 0) {
@@ -177,7 +178,7 @@ public class TestHCatLoaderEncryption {
           "_" + salt.getAndIncrement() + "/dfs/");
     }
 
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
 
     initEncryptionShim(hiveConf);
     String encryptedTablePath =  TEST_WAREHOUSE_DIR + "/encryptedTable";
@@ -393,7 +394,7 @@ public class TestHCatLoaderEncryption {
     }
   }
 
-  static void createTableInSpecifiedPath(String tableName, String schema, 
String path, Driver driver) throws IOException, CommandNeedRetryException {
+  static void createTableInSpecifiedPath(String tableName, String schema, 
String path, IDriver driver) throws IOException, CommandNeedRetryException {
     String createTableStr;
     createTableStr = "create table " + tableName + "(" + schema + ") location 
\'" + path + "\'";
     executeStatementOnDriver(createTableStr, driver);

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
index e2c9b2e..c402ba8 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
@@ -31,7 +31,8 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -60,7 +61,7 @@ public class TestHCatStorerMulti {
 
   private static final String BASIC_TABLE = "junit_unparted_basic";
   private static final String PARTITIONED_TABLE = "junit_parted_basic";
-  private static Driver driver;
+  private static IDriver driver;
 
   private static Map<Integer, Pair<Integer, String>> basicInputData;
 
@@ -104,7 +105,7 @@ public class TestHCatStorerMulti {
       hiveConf
       .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
           
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
-      driver = new Driver(hiveConf);
+      driver = DriverFactory.newDriver(hiveConf);
       SessionState.start(new CliSessionState(hiveConf));
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
 
b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
index 9f7c207..7c5d6d9 100644
--- 
a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
+++ 
b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
@@ -37,7 +37,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.messaging.HCatEventMessage;
@@ -45,7 +46,7 @@ import org.apache.hive.hcatalog.messaging.jms.MessagingUtils;
 
 public class TestMsgBusConnection extends TestCase {
 
-  private Driver driver;
+  private IDriver driver;
   private BrokerService broker;
   private MessageConsumer consumer;
   private static final int TIMEOUT = 2000;
@@ -74,7 +75,7 @@ public class TestMsgBusConnection extends TestCase {
     
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     hiveConf.set(HCatConstants.HCAT_MSGBUS_TOPIC_PREFIX, "planetlab.hcat");
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
   }
 
   private void connectClient() throws JMSException {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java
 
b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java
index 3a33403..e432a3a 100644
--- 
a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java
+++ 
b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
@@ -114,7 +114,7 @@ public class TestNotificationListener extends HCatBaseTest 
implements MessageLis
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     client = new HiveMetaStoreClient(hiveConf);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index bccf60c..19ed03a 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -41,7 +41,8 @@ import 
org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.HCatUtil;
 
@@ -451,7 +452,7 @@ public class HiveEndPoint {
       if(SessionState.get() == null) {
         localSession = SessionState.start(new CliSessionState(conf));
       }
-      Driver driver = new Driver(conf);
+      IDriver driver = DriverFactory.newDriver(conf);
 
       try {
         if (LOG.isDebugEnabled()) {
@@ -488,7 +489,7 @@ public class HiveEndPoint {
       }
     }
 
-    private static boolean runDDL(Driver driver, String sql) throws QueryFailedException {
+    private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
       int retryCount = 1; // # of times to retry if first attempt fails
       for (int attempt=0; attempt<=retryCount; ++attempt) {
         try {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
 
b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index 101ba1a..07621d4 100644
--- 
a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ 
b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -67,7 +67,8 @@ import 
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.BucketCodec;
 import org.apache.hadoop.hive.ql.io.IOConstants;
@@ -153,7 +154,7 @@ public class TestStreaming {
   private static final String COL2 = "msg";
 
   private final HiveConf conf;
-  private Driver driver;
+  private IDriver driver;
   private final IMetaStoreClient msClient;
 
   final String metaStoreURI = null;
@@ -221,7 +222,7 @@ public class TestStreaming {
   @Before
   public void setup() throws Exception {
     SessionState.start(new CliSessionState(conf));
-    driver = new Driver(conf);
+    driver = DriverFactory.newDriver(conf);
     driver.setMaxRows(200002);//make sure Driver returns all results
     // drop and recreate the necessary databases and tables
     dropDB(msClient, dbName);
@@ -2078,7 +2079,7 @@ public class TestStreaming {
 
   ///////// -------- UTILS ------- /////////
   // returns Path of the partition created (if any) else Path of table
-  public static Path createDbAndTable(Driver driver, String databaseName,
+  public static Path createDbAndTable(IDriver driver, String databaseName,
                                       String tableName, List<String> partVals,
                                       String[] colNames, String[] colTypes,
                                       String[] bucketCols,
@@ -2105,14 +2106,14 @@ public class TestStreaming {
     return new Path(tableLoc);
   }
 
-  private static Path addPartition(Driver driver, String tableName, List<String> partVals, String[] partNames) throws QueryFailedException, CommandNeedRetryException, IOException {
+  private static Path addPartition(IDriver driver, String tableName, List<String> partVals, String[] partNames) throws QueryFailedException, CommandNeedRetryException, IOException {
     String partSpec = getPartsSpec(partNames, partVals);
     String addPart = "alter table " + tableName + " add partition ( " + partSpec  + " )";
     runDDL(driver, addPart);
     return getPartitionPath(driver, tableName, partSpec);
   }
 
-  private static Path getPartitionPath(Driver driver, String tableName, String partSpec) throws CommandNeedRetryException, IOException {
+  private static Path getPartitionPath(IDriver driver, String tableName, String partSpec) throws CommandNeedRetryException, IOException {
     ArrayList<String> res = queryTable(driver, "describe extended " + tableName + " PARTITION (" + partSpec + ")");
     String partInfo = res.get(res.size() - 1);
     int start = partInfo.indexOf("location:") + "location:".length();
@@ -2179,7 +2180,7 @@ public class TestStreaming {
     return " partitioned by (" + getTablePartsStr(partNames) + " )";
   }
 
-  private static boolean runDDL(Driver driver, String sql) throws QueryFailedException {
+  private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
     LOG.debug(sql);
     System.out.println(sql);
     int retryCount = 1; // # of times to retry if first attempt fails
@@ -2202,7 +2203,7 @@ public class TestStreaming {
   }
 
 
-  public static ArrayList<String> queryTable(Driver driver, String query)
+  public static ArrayList<String> queryTable(IDriver driver, String query)
           throws CommandNeedRetryException, IOException {
     CommandProcessorResponse cpr = driver.run(query);
     if(cpr.getResponseCode() != 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
 
b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
index b959321..75cbc83 100644
--- 
a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
+++ 
b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
@@ -26,7 +26,8 @@ import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.HcatTestUtils;
@@ -67,7 +68,7 @@ public class TestCommands {
  private static Logger LOG = LoggerFactory.getLogger(CommandTestUtils.class.getName());
 
   private static HiveConf hconf;
-  private static Driver driver;
+  private static IDriver driver;
   private static HCatClient client;
   private static String TEST_PATH;
 
@@ -86,7 +87,7 @@ public class TestCommands {
     FileSystem fs = FileSystem.get(testPath.toUri(),hconf);
     fs.mkdirs(testPath);
 
-    driver = new Driver(hconf);
+    driver = DriverFactory.newDriver(hconf);
     SessionState.start(new CliSessionState(hconf));
     client = HCatClient.create(hconf);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
----------------------------------------------------------------------
diff --git 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
index 120b4af..4778e1f 100644
--- 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
+++ 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
@@ -48,7 +48,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.HCatUtil;
@@ -63,7 +64,7 @@ import org.junit.Test;
 public class TestPigHBaseStorageHandler extends SkeletonHBaseTest {
 
   private static HiveConf   hcatConf;
-  private static Driver driver;
+  private static IDriver driver;
   private static String mypath;
 
   private final byte[] FAMILY     = Bytes.toBytes("testFamily");
@@ -93,7 +94,7 @@ public class TestPigHBaseStorageHandler extends 
SkeletonHBaseTest {
       }
     }
 
-    driver = new Driver(hcatConf);
+    driver = DriverFactory.newDriver(hcatConf);
     SessionState.start(new CliSessionState(hcatConf));
 
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
----------------------------------------------------------------------
diff --git 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
index 308ab0d..c3b8e8c 100644
--- 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
+++ 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
@@ -94,7 +94,8 @@ import 
org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType;
 import org.apache.hadoop.hive.metastore.messaging.InsertMessage;
 import org.apache.hadoop.hive.metastore.messaging.MessageDeserializer;
 import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.api.repl.ReplicationV1CompatRule;
 import org.apache.hive.hcatalog.data.Pair;
@@ -118,7 +119,7 @@ public class TestDbNotificationListener {
   private static final int CLEANUP_SLEEP_TIME = 10;
   private static Map<String, String> emptyParameters = new HashMap<String, 
String>();
   private static IMetaStoreClient msClient;
-  private static Driver driver;
+  private static IDriver driver;
   private static MessageDeserializer md = null;
   private int startTime;
   private long firstEventId;
@@ -255,7 +256,7 @@ public class TestDbNotificationListener {
         
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     SessionState.start(new CliSessionState(conf));
     msClient = new HiveMetaStoreClient(conf);
-    driver = new Driver(conf);
+    driver = DriverFactory.newDriver(conf);
     md = MessageFactory.getInstance().getDeserializer();
 
     bcompat = new ReplicationV1CompatRule(msClient, conf, 
testsToSkipForReplV1BackwardCompatTesting );

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
----------------------------------------------------------------------
diff --git 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
index d2dbe8f..b7888c6 100644
--- 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+++ 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
@@ -32,7 +32,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
@@ -59,7 +60,7 @@ public class TestSequenceFileReadWrite {
   private File dataDir;
   private String warehouseDir;
   private String inputFileName;
-  private Driver driver;
+  private IDriver driver;
   private PigServer server;
   private String[] input;
   private HiveConf hiveConf;
@@ -79,7 +80,7 @@ public class TestSequenceFileReadWrite {
     hiveConf
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 
     if(!(new File(warehouseDir).mkdirs())) {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
index 91fc706..2382582 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
@@ -36,18 +36,17 @@ import 
org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-
-import com.google.common.collect.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
 public class TestFilterHooks {
   private static final Logger LOG = 
LoggerFactory.getLogger(TestFilterHooks.class);
 
@@ -165,7 +164,7 @@ public class TestFilterHooks {
   private static final String INDEX1 = "idx1";
   private static HiveConf hiveConf;
   private static HiveMetaStoreClient msc;
-  private static Driver driver;
+  private static IDriver driver;
 
   @BeforeClass
   public static void setUp() throws Exception {
@@ -183,7 +182,7 @@ public class TestFilterHooks {
 
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
 
     driver.run("drop database if exists " + DBNAME1  + " cascade");
     driver.run("drop database if exists " + DBNAME2  + " cascade");

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
index 7cf351f..7b3a896 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
@@ -34,14 +34,15 @@ import 
org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.thrift.TException;
 
 public class TestMarkPartition extends TestCase{
 
   protected HiveConf hiveConf;
-  private Driver driver;
+  private IDriver driver;
 
   @Override
   protected void setUp() throws Exception {
@@ -61,7 +62,7 @@ public class TestMarkPartition extends TestCase{
   TException, NoSuchObjectException, UnknownDBException, UnknownTableException,
   InvalidPartitionException, UnknownPartitionException, InterruptedException {
     HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf);
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     driver.run("drop database if exists hive2215 cascade");
     driver.run("create database hive2215");
     driver.run("use hive2215");

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
index e7b899b..51c3ccd 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.net.ServerSocket;
 import java.util.concurrent.TimeUnit;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -33,7 +31,8 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+
+import junit.framework.TestCase;
 
 
 public class TestMetaStoreAuthorization extends TestCase {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
index e44cfca..1ca18b9 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
@@ -20,15 +20,14 @@ package org.apache.hadoop.hive.metastore;
 
 
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+
+import junit.framework.TestCase;
 
 /**
  * TestMetaStoreEventListener. Test case for
@@ -37,7 +36,7 @@ import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 public class TestMetaStoreEndFunctionListener extends TestCase {
   private HiveConf hiveConf;
   private HiveMetaStoreClient msc;
-  private Driver driver;
+  private IDriver driver;
 
   @Override
   protected void setUp() throws Exception {
@@ -58,7 +57,7 @@ public class TestMetaStoreEndFunctionListener extends 
TestCase {
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
index e803106..260c947 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import com.google.common.collect.Lists;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -27,7 +25,6 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -63,12 +60,15 @@ import 
org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreEventContext;
 import org.apache.hadoop.hive.metastore.events.PreLoadPartitionDoneEvent;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+import com.google.common.collect.Lists;
+
+import junit.framework.TestCase;
+
 /**
  * TestMetaStoreEventListener. Test case for
  * {@link org.apache.hadoop.hive.metastore.MetaStoreEventListener} and
@@ -77,7 +77,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 public class TestMetaStoreEventListener extends TestCase {
   private HiveConf hiveConf;
   private HiveMetaStoreClient msc;
-  private Driver driver;
+  private IDriver driver;
 
   private static final String dbName = "hive2038";
   private static final String tblName = "tmptbl";
@@ -107,7 +107,7 @@ public class TestMetaStoreEventListener extends TestCase {
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
 
     driver.run("drop database if exists " + dbName + " cascade");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
index 9623fed..cc2c5f9 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
@@ -20,16 +20,15 @@ package org.apache.hadoop.hive.metastore;
 
 import java.util.List;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.events.ListenerEvent;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+import junit.framework.TestCase;
+
 /**
  * Ensure that the status of MetaStore events depend on the RawStore's commit 
status.
  */
@@ -37,7 +36,7 @@ public class TestMetaStoreEventListenerOnlyOnCommit extends 
TestCase {
 
   private HiveConf hiveConf;
   private HiveMetaStoreClient msc;
-  private Driver driver;
+  private IDriver driver;
 
   @Override
   protected void setUp() throws Exception {
@@ -61,7 +60,7 @@ public class TestMetaStoreEventListenerOnlyOnCommit extends 
TestCase {
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
 
     DummyListener.notifyList.clear();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
index 4982313..025cc40 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
@@ -18,15 +18,14 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+import junit.framework.TestCase;
+
 /**
  * TestMetaStoreInitListener. Test case for
  * {@link org.apache.hadoop.hive.metastore.MetaStoreInitListener}
@@ -34,7 +33,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 public class TestMetaStoreInitListener extends TestCase {
   private HiveConf hiveConf;
   private HiveMetaStoreClient msc;
-  private Driver driver;
+  private IDriver driver;
 
   @Override
   protected void setUp() throws Exception {
@@ -51,7 +50,7 @@ public class TestMetaStoreInitListener extends TestCase {
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java
index 11ebf4d..99b67bb 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import junit.framework.Assert;
-import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
 
 /**
  * Test for unwrapping InvocationTargetException, which is thrown from

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
index 6f5a963..928de6d 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
@@ -21,9 +21,8 @@ import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.metrics.Metrics;
 import org.apache.hadoop.hive.metastore.metrics.MetricsConstants;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -37,7 +36,7 @@ public class TestMetaStoreMetrics {
 
 
   private static HiveConf hiveConf;
-  private static Driver driver;
+  private static IDriver driver;
 
   @BeforeClass
   public static void before() throws Exception {
@@ -56,7 +55,7 @@ public class TestMetaStoreMetrics {
 
     //Increments one HMS connection (Hive.get())
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
   }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
index 0a034d3..7525c57 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
@@ -20,25 +20,25 @@ package org.apache.hadoop.hive.metastore;
 import java.io.File;
 import java.lang.reflect.Field;
 
-import junit.framework.TestCase;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.ObjectStore;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.common.util.HiveStringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import junit.framework.TestCase;
 
 public class TestMetastoreVersion extends TestCase {
   private static final Logger LOG = 
LoggerFactory.getLogger(TestMetastoreVersion.class);
   protected HiveConf hiveConf;
-  private Driver driver;
+  private IDriver driver;
   private String metaStoreRoot;
   private String testMetastoreDB;
   private IMetaStoreSchemaInfo metastoreSchemaInfo;
@@ -120,7 +120,7 @@ public class TestMetastoreVersion extends TestCase {
     
System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION_RECORD_VERSION.toString(),
 "true");
     hiveConf = new HiveConf(this.getClass());
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     driver.run("show tables");
 
     // correct version stored by Metastore during startup
@@ -137,13 +137,13 @@ public class TestMetastoreVersion extends TestCase {
     
System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), 
"false");
     hiveConf = new HiveConf(this.getClass());
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     driver.run("show tables");
 
     ObjectStore.setSchemaVerified(false);
     hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, true);
     setVersion(hiveConf, metastoreSchemaInfo.getHiveSchemaVersion());
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     CommandProcessorResponse proc = driver.run("show tables");
     assertTrue(proc.getResponseCode() == 0);
   }
@@ -156,7 +156,7 @@ public class TestMetastoreVersion extends TestCase {
     
System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), 
"false");
     hiveConf = new HiveConf(this.getClass());
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     driver.run("show tables");
 
     ObjectStore.setSchemaVerified(false);
@@ -164,7 +164,7 @@ public class TestMetastoreVersion extends TestCase {
     hiveConf = new HiveConf(this.getClass());
     setVersion(hiveConf, "fooVersion");
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     CommandProcessorResponse proc = driver.run("show tables");
     assertTrue(proc.getResponseCode() != 0);
   }
@@ -178,14 +178,14 @@ public class TestMetastoreVersion extends TestCase {
     
System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), 
"false");
     hiveConf = new HiveConf(this.getClass());
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     driver.run("show tables");
 
     
System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), 
"true");
     hiveConf = new HiveConf(this.getClass());
     setVersion(hiveConf, "3.9000.0");
     SessionState.start(new CliSessionState(hiveConf));
-    driver = new Driver(hiveConf);
+    driver = DriverFactory.newDriver(hiveConf);
     CommandProcessorResponse proc = driver.run("show tables");
     assertEquals(0, proc.getResponseCode());
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
index 1fd84ac..d43c239 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
@@ -76,7 +76,7 @@ public class TestAcidOnTez {
   @Rule
   public TestName testName = new TestName();
   private HiveConf hiveConf;
-  private Driver d;
+  private IDriver d;
   private static enum Table {
     ACIDTBL("acidTbl"),
     ACIDTBLPART("acidTblPart"),
@@ -118,7 +118,7 @@ public class TestAcidOnTez {
       throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
     }
     SessionState.start(new SessionState(hiveConf));
-    d = new Driver(hiveConf);
+    d = DriverFactory.newDriver(hiveConf);
     dropTables();
     runStatementOnDriver("create table " + Table.ACIDTBL + "(a int, b int) 
clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc " + 
getTblProperties());
     runStatementOnDriver("create table " + Table.ACIDTBLPART + "(a int, b int) 
partitioned by (p string) clustered by (a) into " + BUCKET_COUNT + " buckets 
stored as orc " + getTblProperties());
@@ -791,7 +791,7 @@ ekoifman:apache-hive-3.0.0-SNAPSHOT-bin ekoifman$ tree  
~/dev/hiverwgit/itests/h
     }
 
     SessionState.start(conf);
-    d = new Driver(conf);
+    d = DriverFactory.newDriver(conf);
   }
 
   // Ideally test like this should be a qfile test. However, the explain 
output from qfile is always
@@ -886,7 +886,7 @@ ekoifman:apache-hive-3.0.0-SNAPSHOT-bin ekoifman$ tree  
~/dev/hiverwgit/itests/h
    */
   private List<String> runStatementOnDriver(String stmt, HiveConf conf)
       throws Exception {
-    Driver driver = new Driver(conf);
+    IDriver driver = DriverFactory.newDriver(conf);
     driver.setMaxRows(10000);
     CommandProcessorResponse cpr = driver.run(stmt);
     if(cpr.getResponseCode() != 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
index 34f4ed0..1894ba0 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
@@ -28,15 +28,15 @@ import org.junit.Before;
 import org.junit.Test;
 
 public class TestCreateUdfEntities {
-  private Driver driver;
+  private IDriver driver;
   private String funcName = "print_test";
 
   @Before
   public void setUp() throws Exception {
 
-    HiveConf conf = new HiveConf(Driver.class);
+    HiveConf conf = new HiveConf(IDriver.class);
     SessionState.start(conf);
-    driver = new Driver(conf);
+    driver = DriverFactory.newDriver(conf);
   }
 
   @After

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
index 179eed9..cde728e 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
@@ -64,7 +64,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends 
TestCase {
   private static Hive db;
   private static FileSystem fs, fs2;
   private static HiveConf jobConf;
-  private static Driver driver;
+  private static IDriver driver;
   private static int tests = 0;
   private static Boolean isInitialized = false;
 
@@ -100,7 +100,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode 
extends TestCase {
       fs2Uri = fs2.getUri().toString();
       jobConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fs2Uri);
 
-      driver = new Driver(jobConf);
+      driver = DriverFactory.newDriver(jobConf);
 
       fs = FileSystem.get(conf);
       if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index d73cd64..3d4f24d 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -35,7 +35,8 @@ import 
org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
 import org.apache.hadoop.hive.ql.history.HiveHistory.TaskInfo;
@@ -143,7 +144,7 @@ public class TestHiveHistory extends TestCase {
       SessionState.start(ss);
 
       String cmd = "select a.key+1 from src a";
-      Driver d = new Driver(conf);
+      IDriver d = DriverFactory.newDriver(conf);
       int ret = d.run(cmd).getResponseCode();
       if (ret != 0) {
         fail("Failed");

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
index 2170ca3..7e59241 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
@@ -25,7 +25,8 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -37,7 +38,7 @@ public class TestSemanticAnalyzerHookLoading extends TestCase 
{
     conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, 
DummySemanticAnalyzerHook.class.getName());
     conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(conf);
-    Driver driver = new Driver(conf);
+    IDriver driver = DriverFactory.newDriver(conf);
 
     driver.run("drop table testDL");
     CommandProcessorResponse resp = driver.run("create table testDL (a int) as 
select * from tbl2");

http://git-wip-us.apache.org/repos/asf/hive/blob/0e2dab91/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 0e08007..2bb06d5 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -51,7 +51,8 @@ import 
org.apache.hadoop.hive.metastore.messaging.event.filters.EventBoundaryFil
 import 
org.apache.hadoop.hive.metastore.messaging.event.filters.MessageFormatFilter;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -101,12 +102,12 @@ public class TestReplicationScenarios {
 
   private static HiveConf hconf;
   private static int msPort;
-  private static Driver driver;
+  private static IDriver driver;
   private static HiveMetaStoreClient metaStoreClient;
   private static String proxySettingName;
   static HiveConf hconfMirror;
   static int msPortMirror;
-  static Driver driverMirror;
+  static IDriver driverMirror;
   static HiveMetaStoreClient metaStoreClientMirror;
 
   @Rule
@@ -159,7 +160,7 @@ public class TestReplicationScenarios {
     FileSystem fs = FileSystem.get(testPath.toUri(),hconf);
     fs.mkdirs(testPath);
 
-    driver = new Driver(hconf);
+    driver = DriverFactory.newDriver(hconf);
     SessionState.start(new CliSessionState(hconf));
     metaStoreClient = new HiveMetaStoreClient(hconf);
 
@@ -170,7 +171,7 @@ public class TestReplicationScenarios {
     hconfMirror = new HiveConf(hconf);
     hconfMirror.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:"
         + msPortMirror);
-    driverMirror = new Driver(hconfMirror);
+    driverMirror = DriverFactory.newDriver(hconfMirror);
     metaStoreClientMirror = new HiveMetaStoreClient(hconfMirror);
 
     ObjectStore.setTwoMetastoreTesting(true);
@@ -586,7 +587,7 @@ public class TestReplicationScenarios {
             public void run() {
               try {
                 LOG.info("Entered new thread");
-                Driver driver2 = new Driver(hconf);
+                IDriver driver2 = DriverFactory.newDriver(hconf);
                 SessionState.start(new CliSessionState(hconf));
                 CommandProcessorResponse ret = driver2.run("ALTER TABLE " + 
dbName + ".ptned PARTITION (b=1) RENAME TO PARTITION (b=10)");
                 success = (ret.getException() == null);
@@ -661,7 +662,7 @@ public class TestReplicationScenarios {
             public void run() {
               try {
                 LOG.info("Entered new thread");
-                Driver driver2 = new Driver(hconf);
+                IDriver driver2 = DriverFactory.newDriver(hconf);
                 SessionState.start(new CliSessionState(hconf));
                 CommandProcessorResponse ret = driver2.run("DROP TABLE " + 
dbName + ".ptned");
                 success = (ret.getException() == null);
@@ -3358,7 +3359,7 @@ public class TestReplicationScenarios {
     verifyRun("SELECT count(*) from " + dbName + "_dupe.unptned", new 
String[]{"2"}, driverMirror);
   }
 
-  private static String createDB(String name, Driver myDriver) {
+  private static String createDB(String name, IDriver myDriver) {
     LOG.info("Testing " + name);
     String dbName = name + "_" + tid;
     run("CREATE DATABASE " + dbName, myDriver);
@@ -3542,10 +3543,10 @@ public class TestReplicationScenarios {
   }
 
 
-  private String getResult(int rowNum, int colNum, Driver myDriver) throws 
IOException {
+  private String getResult(int rowNum, int colNum, IDriver myDriver) throws 
IOException {
     return getResult(rowNum,colNum,false, myDriver);
   }
-  private String getResult(int rowNum, int colNum, boolean reuse, Driver 
myDriver) throws IOException {
+  private String getResult(int rowNum, int colNum, boolean reuse, IDriver 
myDriver) throws IOException {
     if (!reuse) {
       lastResults = new ArrayList<String>();
       try {
@@ -3566,7 +3567,7 @@ public class TestReplicationScenarios {
    * Unless for Null Values it actually returns in UpperCase and hence 
explicitly lowering case
    * before assert.
    */
-  private void verifyResults(String[] data, Driver myDriver) throws 
IOException {
+  private void verifyResults(String[] data, IDriver myDriver) throws 
IOException {
     List<String> results = getOutput(myDriver);
     LOG.info("Expecting {}", data);
     LOG.info("Got {}", results);
@@ -3576,7 +3577,7 @@ public class TestReplicationScenarios {
     }
   }
 
-  private List<String> getOutput(Driver myDriver) throws IOException {
+  private List<String> getOutput(IDriver myDriver) throws IOException {
     List<String> results = new ArrayList<>();
     try {
       myDriver.getResults(results);
@@ -3587,7 +3588,7 @@ public class TestReplicationScenarios {
     return results;
   }
 
-  private void printOutput(Driver myDriver) throws IOException {
+  private void printOutput(IDriver myDriver) throws IOException {
     for (String s : getOutput(myDriver)){
       LOG.info(s);
     }
@@ -3648,23 +3649,23 @@ public class TestReplicationScenarios {
     }
   }
 
-  private void verifySetup(String cmd, String[] data, Driver myDriver) throws  
IOException {
+  private void verifySetup(String cmd, String[] data, IDriver myDriver) throws 
 IOException {
     if (VERIFY_SETUP_STEPS){
       run(cmd, myDriver);
       verifyResults(data, myDriver);
     }
   }
 
-  private void verifyRun(String cmd, String data, Driver myDriver) throws 
IOException {
+  private void verifyRun(String cmd, String data, IDriver myDriver) throws 
IOException {
     verifyRun(cmd, new String[] { data }, myDriver);
   }
 
-  private void verifyRun(String cmd, String[] data, Driver myDriver) throws 
IOException {
+  private void verifyRun(String cmd, String[] data, IDriver myDriver) throws 
IOException {
     run(cmd, myDriver);
     verifyResults(data, myDriver);
   }
 
-  private void verifyFail(String cmd, Driver myDriver) throws RuntimeException 
{
+  private void verifyFail(String cmd, IDriver myDriver) throws 
RuntimeException {
     boolean success = false;
     try {
       success = run(cmd, false, myDriver);
@@ -3677,7 +3678,7 @@ public class TestReplicationScenarios {
     assertFalse(success);
   }
 
-  private void verifyRunWithPatternMatch(String cmd, String key, String 
pattern, Driver myDriver) throws IOException {
+  private void verifyRunWithPatternMatch(String cmd, String key, String 
pattern, IDriver myDriver) throws IOException {
     run(cmd, myDriver);
     List<String> results = getOutput(myDriver);
     assertTrue(results.size() > 0);
@@ -3692,7 +3693,7 @@ public class TestReplicationScenarios {
     assertTrue(success);
   }
 
-  private static void run(String cmd, Driver myDriver) throws RuntimeException 
{
+  private static void run(String cmd, IDriver myDriver) throws 
RuntimeException {
     try {
     run(cmd,false, myDriver); // default arg-less run simply runs, and does 
not care about failure
     } catch (AssertionError ae){
@@ -3702,7 +3703,7 @@ public class TestReplicationScenarios {
     }
   }
 
-  private static boolean run(String cmd, boolean errorOnFail, Driver myDriver) 
throws RuntimeException {
+  private static boolean run(String cmd, boolean errorOnFail, IDriver 
myDriver) throws RuntimeException {
     boolean success = false;
     try {
       CommandProcessorResponse ret = myDriver.run(cmd);

Reply via email to