Repository: hive
Updated Branches:
  refs/heads/master 6356205c7 -> 1d15990ad


HIVE-18238: Driver execution may not have configuration changing side effects 
(Zoltan Haindrich reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich <k...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f5c08a95
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f5c08a95
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f5c08a95

Branch: refs/heads/master
Commit: f5c08a951763d811289e6f39d6f08dcac36bb45d
Parents: 6356205
Author: Zoltan Haindrich <k...@rxd.hu>
Authored: Tue Feb 13 14:33:21 2018 +0100
Committer: Zoltan Haindrich <k...@rxd.hu>
Committed: Tue Feb 13 14:33:21 2018 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/cli/CliDriver.java   | 24 +++----
 .../org/apache/hive/hcatalog/cli/HCatCli.java   |  8 +--
 .../apache/hive/hcatalog/cli/HCatDriver.java    | 16 ++---
 .../apache/hive/hcatalog/cli/TestPermsGrp.java  | 20 +++---
 .../hcatalog/pig/TestHCatLoaderEncryption.java  |  5 +-
 .../plugin/TestHiveAuthorizerShowFilters.java   |  3 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 66 ++++++--------------
 .../apache/hadoop/hive/ql/DriverFactory.java    | 19 ++----
 .../java/org/apache/hadoop/hive/ql/IDriver.java |  6 +-
 .../org/apache/hadoop/hive/ql/QueryState.java   | 51 ++++++++-------
 .../hadoop/hive/ql/hooks/HooksLoader.java       |  8 +--
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java | 11 +++-
 .../hive/ql/lockmgr/HiveTxnManagerImpl.java     | 16 ++++-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  1 +
 .../ql/processors/AddResourceProcessor.java     |  4 ++
 .../hive/ql/processors/CommandProcessor.java    |  2 +-
 .../ql/processors/CommandProcessorFactory.java  | 40 ++----------
 .../hive/ql/processors/CompileProcessor.java    |  4 ++
 .../hive/ql/processors/CryptoProcessor.java     |  4 ++
 .../ql/processors/DeleteResourceProcessor.java  |  4 ++
 .../hadoop/hive/ql/processors/DfsProcessor.java | 11 ++--
 .../ql/processors/ListResourceProcessor.java    |  4 ++
 .../hive/ql/processors/ReloadProcessor.java     |  4 ++
 .../hive/ql/processors/ResetProcessor.java      |  4 ++
 .../hadoop/hive/ql/processors/SetProcessor.java |  3 +
 .../hadoop/hive/ql/txn/compactor/Worker.java    |  5 +-
 .../ql/udf/generic/GenericUDTFGetSplits.java    |  6 +-
 .../apache/hadoop/hive/ql/TestTxnCommands2.java |  5 +-
 .../hadoop/hive/ql/TxnCommandsBaseForTests.java |  8 +--
 .../hadoop/hive/ql/exec/TestOperators.java      |  2 +-
 .../hadoop/hive/ql/hooks/TestQueryHooks.java    |  4 +-
 .../hive/ql/lockmgr/TestDbTxnManager2.java      | 11 ++--
 .../hive/ql/lockmgr/TestDummyTxnManager.java    |  9 +++
 .../clientpositive/driver_conf_isolation.q      |  5 ++
 .../special_character_in_tabnames_1.q           |  1 +
 .../clientpositive/driver_conf_isolation.q.out  | 34 ++++++++++
 .../test/results/clientpositive/input39.q.out   |  2 +-
 .../hive/service/cli/operation/Operation.java   |  8 +--
 38 files changed, 240 insertions(+), 198 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
----------------------------------------------------------------------
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java 
b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index e57412a..68741f6 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -181,18 +181,23 @@ public class CliDriver {
       }
     }  else { // local mode
       try {
-        CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) 
conf);
-        if (proc instanceof IDriver) {
-          // Let Driver strip comments using sql parser
-          ret = processLocalCmd(cmd, proc, ss);
-        } else {
-          ret = processLocalCmd(cmd_trimmed, proc, ss);
+
+        try (CommandProcessor proc = CommandProcessorFactory.get(tokens, 
(HiveConf) conf)) {
+          if (proc instanceof IDriver) {
+            // Let Driver strip comments using sql parser
+            ret = processLocalCmd(cmd, proc, ss);
+          } else {
+            ret = processLocalCmd(cmd_trimmed, proc, ss);
+          }
         }
       } catch (SQLException e) {
         console.printError("Failed processing command " + tokens[0] + " " + 
e.getLocalizedMessage(),
           org.apache.hadoop.util.StringUtils.stringifyException(e));
         ret = 1;
       }
+      catch (Exception e) {
+        throw new RuntimeException(e);
+      }
     }
 
     ss.resetThreadName();
@@ -270,10 +275,7 @@ public class CliDriver {
           ret = 1;
         }
 
-        int cret = qp.close();
-        if (ret == 0) {
-          ret = cret;
-        }
+        qp.close();
 
         if (out instanceof FetchConverter) {
           ((FetchConverter) out).fetchFinished();
@@ -402,11 +404,9 @@ public class CliDriver {
         lastRet = ret;
         boolean ignoreErrors = HiveConf.getBoolVar(conf, 
HiveConf.ConfVars.CLIIGNOREERRORS);
         if (ret != 0 && !ignoreErrors) {
-          CommandProcessorFactory.clean((HiveConf) conf);
           return ret;
         }
       }
-      CommandProcessorFactory.clean((HiveConf) conf);
       return lastRet;
     } finally {
       // Once we are done processing the line, restore the old handler

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
index a36b0db..d7a9bb0 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
@@ -144,9 +144,6 @@ public class HCatCli {
     // -D : process these first, so that we can instantiate SessionState 
appropriately.
     setConfProperties(conf, cmdLine.getOptionProperties("D"));
 
-    // Now that the properties are in, we can instantiate SessionState.
-    SessionState.start(ss);
-
     // -h
     if (cmdLine.hasOption('h')) {
       printUsage(options, ss.out);
@@ -176,6 +173,9 @@ public class HCatCli {
       conf.set(HCatConstants.HCAT_GROUP, grp);
     }
 
+    // Now that the properties are in, we can instantiate SessionState.
+    SessionState.start(ss);
+
     // all done parsing, let's run stuff!
 
     if (execString != null) {
@@ -286,7 +286,7 @@ public class HCatCli {
       return new 
DfsProcessor(ss.getConf()).run(cmd.substring(firstToken.length()).trim()).getResponseCode();
     }
 
-    HCatDriver driver = new HCatDriver();
+    HCatDriver driver = new HCatDriver(ss.getConf());
 
     int ret = driver.run(cmd).getResponseCode();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
index e112412..6a7b939 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
@@ -21,10 +21,10 @@ package org.apache.hive.hcatalog.cli;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
@@ -39,8 +39,8 @@ public class HCatDriver {
 
   private IDriver driver;
 
-  public HCatDriver() {
-    driver = DriverFactory.newDriver();
+  public HCatDriver(HiveConf hiveConf) {
+    driver = DriverFactory.newDriver(hiveConf);
   }
 
   public CommandProcessorResponse run(String command) {
@@ -52,7 +52,8 @@ public class HCatDriver {
 
     if (cpr.getResponseCode() == 0) {
       // Only attempt to do this, if cmd was successful.
-      int rc = setFSPermsNGrp(ss);
+      // FIXME: it would be probably better to move this to an after-execution
+      int rc = setFSPermsNGrp(ss, driver.getConf());
       cpr = new CommandProcessorResponse(rc);
     }
     // reset conf vars
@@ -62,9 +63,7 @@ public class HCatDriver {
     return cpr;
   }
 
-  private int setFSPermsNGrp(SessionState ss) {
-
-    Configuration conf = ss.getConf();
+  private int setFSPermsNGrp(SessionState ss, HiveConf conf) {
 
     String tblName = conf.get(HCatConstants.HCAT_CREATE_TBL_NAME, "");
     if (tblName.isEmpty()) {
@@ -145,7 +144,8 @@ public class HCatDriver {
   }
 
   public int close() {
-    return driver.close();
+    driver.close();
+    return 0;
   }
 
   public boolean getResults(ArrayList<String> res) throws IOException {

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
index 4dbf7ac..d78ab78 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
@@ -25,8 +25,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -44,7 +42,6 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.Type;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -58,6 +55,8 @@ import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import junit.framework.TestCase;
+
 public class TestPermsGrp extends TestCase {
 
   private boolean isServerRunning = false;
@@ -120,12 +119,13 @@ public class TestPermsGrp extends TestCase {
       // Next user did specify perms.
       try {
         callHCatCli(new String[]{"-e", "create table simptbl (name string) 
stored as RCFILE", "-p", "rwx-wx---"});
+        fail();
       } catch (Exception e) {
         assertTrue(e instanceof ExitException);
         assertEquals(((ExitException) e).getStatus(), 0);
       }
       dfsPath = clientWH.getDefaultTablePath(db, tblName);
-      
assertTrue(dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission().equals(FsPermission.valueOf("drwx-wx---")));
+      assertEquals(FsPermission.valueOf("drwx-wx---"), 
dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission());
 
       cleanupTbl(dbName, tblName, typeName);
 
@@ -134,7 +134,7 @@ public class TestPermsGrp extends TestCase {
       // make sure create table fails.
       try {
         callHCatCli(new String[]{"-e", "create table simptbl (name string) 
stored as RCFILE", "-p", "rwx"});
-        assert false;
+        fail();
       } catch (Exception me) {
         assertTrue(me instanceof ExitException);
       }
@@ -142,7 +142,7 @@ public class TestPermsGrp extends TestCase {
       dfsPath = clientWH.getDefaultTablePath(db, tblName);
       try {
         dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
-        assert false;
+        fail();
       } catch (Exception fnfe) {
         assertTrue(fnfe instanceof FileNotFoundException);
       }
@@ -150,7 +150,7 @@ public class TestPermsGrp extends TestCase {
       // And no metadata gets created.
       try {
         msc.getTable(Warehouse.DEFAULT_DATABASE_NAME, tblName);
-        assert false;
+        fail();
       } catch (Exception e) {
         assertTrue(e instanceof NoSuchObjectException);
         assertEquals("default.simptbl table not found", e.getMessage());
@@ -163,7 +163,7 @@ public class TestPermsGrp extends TestCase {
       try {
         // create table must fail.
         callHCatCli(new String[]{"-e", "create table simptbl (name string) 
stored as RCFILE", "-p", "rw-rw-rw-", "-g", 
"THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER"});
-        assert false;
+        fail();
       } catch (Exception me) {
         assertTrue(me instanceof SecurityException);
       }
@@ -171,7 +171,7 @@ public class TestPermsGrp extends TestCase {
       try {
         // no metadata should get created.
         msc.getTable(dbName, tblName);
-        assert false;
+        fail();
       } catch (Exception e) {
         assertTrue(e instanceof NoSuchObjectException);
         assertEquals("default.simptbl table not found", e.getMessage());
@@ -179,7 +179,7 @@ public class TestPermsGrp extends TestCase {
       try {
         // neither dir should get created.
         dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
-        assert false;
+        fail();
       } catch (Exception e) {
         assertTrue(e instanceof FileNotFoundException);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index 1560571..72ed8df 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -79,6 +79,7 @@ import org.slf4j.LoggerFactory;
 
 @RunWith(Parameterized.class)
 public class TestHCatLoaderEncryption {
+
   private static final AtomicInteger salt = new AtomicInteger(new 
Random().nextInt());
   private static final Logger LOG = 
LoggerFactory.getLogger(TestHCatLoaderEncryption.class);
   private final String TEST_DATA_DIR = 
HCatUtil.makePathASafeFileName(System.getProperty
@@ -177,12 +178,12 @@ public class TestHCatLoaderEncryption {
           "_" + salt.getAndIncrement() + "/dfs/");
     }
 
-    driver = DriverFactory.newDriver(hiveConf);
-
     initEncryptionShim(hiveConf);
     String encryptedTablePath =  TEST_WAREHOUSE_DIR + "/encryptedTable";
     SessionState.start(new CliSessionState(hiveConf));
 
+    driver = DriverFactory.newDriver(hiveConf);
+
     SessionState.get().out = System.out;
 
     createTable(BASIC_TABLE, "a int, b string");

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
----------------------------------------------------------------------
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
index 8981223..d69696c 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
@@ -121,7 +121,8 @@ public class TestHiveAuthorizerShowFilters {
     conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     UtilsForTest.setNewDerbyDbLocation(conf, 
TestHiveAuthorizerShowFilters.class.getSimpleName());
 
-    SessionState.start(conf);
+    SessionState ss = SessionState.start(conf);
+    ss.applyAuthorizationPolicy();
     driver = DriverFactory.newDriver(conf);
     runCmd("create table " + tableName1
         + " (i int, j int, k string) partitioned by (city string, `date` 
string) ");

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index c6f7d64..8f7291d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -40,6 +40,7 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.ValidReadTxnList;
@@ -172,7 +173,7 @@ public class Driver implements IDriver {
   private LockedDriverState lDrvState = new LockedDriverState();
 
   // Query specific info
-  private QueryState queryState;
+  private final QueryState queryState;
 
   // Query hooks that execute before compilation and after execution
   private QueryLifeTimeHookRunner queryLifeTimeHookRunner;
@@ -371,25 +372,17 @@ public class Driver implements IDriver {
     this.maxRows = maxRows;
   }
 
-  public Driver() {
-    this(getNewQueryState((SessionState.get() != null) ?
-        SessionState.get().getConf() : new HiveConf()), null);
-  }
-
   public Driver(HiveConf conf) {
-    this(getNewQueryState(conf), null);
+    this(new 
QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(), 
null);
   }
 
   // Pass lineageState when a driver instantiates another Driver to run
   // or compile another query
+  // NOTE: only used from index related classes
   public Driver(HiveConf conf, LineageState lineageState) {
     this(getNewQueryState(conf, lineageState), null);
   }
 
-  public Driver(HiveConf conf, HiveTxnManager txnMgr) {
-    this(getNewQueryState(conf), null, null, txnMgr);
-  }
-
   // Pass lineageState when a driver instantiates another Driver to run
   // or compile another query
   public Driver(HiveConf conf, Context ctx, LineageState lineageState) {
@@ -397,10 +390,6 @@ public class Driver implements IDriver {
     this.ctx = ctx;
   }
 
-  public Driver(HiveConf conf, String userName) {
-    this(getNewQueryState(conf), userName, null);
-  }
-
   // Pass lineageState when a driver instantiates another Driver to run
   // or compile another query
   public Driver(HiveConf conf, String userName, LineageState lineageState) {
@@ -411,10 +400,6 @@ public class Driver implements IDriver {
     this(queryState, userName, new HooksLoader(queryState.getConf()), null, 
null);
   }
 
-  public Driver(HiveConf conf, HooksLoader hooksLoader) {
-    this(getNewQueryState(conf), null, hooksLoader, null, null);
-  }
-
   public Driver(QueryState queryState, String userName, QueryInfo queryInfo) {
      this(queryState, userName, new HooksLoader(queryState.getConf()), 
queryInfo, null);
   }
@@ -438,17 +423,6 @@ public class Driver implements IDriver {
   /**
    * Generating the new QueryState object. Making sure, that the new queryId 
is generated.
    * @param conf The HiveConf which should be used
-   * @return The new QueryState object
-   */
-  // move to driverFactory ; with those constructors...
-  @Deprecated
-  private static QueryState getNewQueryState(HiveConf conf) {
-    return new 
QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build();
-  }
-
-  /**
-   * Generating the new QueryState object. Making sure, that the new queryId 
is generated.
-   * @param conf The HiveConf which should be used
    * @param lineageState a LineageState to be set in the new QueryState object
    * @return The new QueryState object
    */
@@ -542,6 +516,9 @@ public class Driver implements IDriver {
 
     LOG.info("Compiling command(queryId=" + queryId + "): " + queryStr);
 
+    conf.setQueryString(queryStr);
+    // FIXME: sideeffect will leave the last query set at the session level
+    SessionState.get().getConf().setQueryString(queryStr);
     SessionState.get().setupQueryCurrentTimestamp();
 
     // Whether any error occurred during query compilation. Used for query 
lifetime hook.
@@ -556,6 +533,9 @@ public class Driver implements IDriver {
       } else {
         queryTxnMgr = SessionState.get().initTxnMgr(conf);
       }
+      if (queryTxnMgr instanceof Configurable) {
+        ((Configurable) queryTxnMgr).setConf(conf);
+      }
       queryState.setTxnManager(queryTxnMgr);
 
       // In case when user Ctrl-C twice to kill Hive CLI JVM, we want to 
release locks
@@ -660,7 +640,6 @@ public class Driver implements IDriver {
       plan = new QueryPlan(queryStr, sem, 
perfLogger.getStartTime(PerfLogger.DRIVER_RUN), queryId,
         queryState.getHiveOperation(), schema);
 
-      conf.setQueryString(queryStr);
 
       conf.set("mapreduce.workflow.id", "hive_" + queryId);
       conf.set("mapreduce.workflow.name", queryStr);
@@ -1203,6 +1182,11 @@ public class Driver implements IDriver {
     return HiveOperationType.valueOf(op.name());
   }
 
+  @Override
+  public HiveConf getConf() {
+    return conf;
+  }
+
   /**
    * @return The current query plan associated with this Driver, if any.
    */
@@ -1379,7 +1363,7 @@ public class Driver implements IDriver {
    * Release some resources after a query is executed
    * while keeping the result around.
    */
-  private void releaseResources() {
+  public void releaseResources() {
     releasePlan();
     releaseDriverContext();
   }
@@ -2498,14 +2482,13 @@ public class Driver implements IDriver {
 
   // is called to stop the query if it is running, clean query results, and 
release resources.
   @Override
-  public int close() {
+  public void close() {
     lDrvState.stateLock.lock();
     try {
       releaseDriverContext();
       if (lDrvState.driverState == DriverState.COMPILING ||
           lDrvState.driverState == DriverState.EXECUTING) {
         lDrvState.abort();
-        return 0;
       }
       releasePlan();
       releaseCachedResult();
@@ -2517,7 +2500,7 @@ public class Driver implements IDriver {
       lDrvState.stateLock.unlock();
       LockedDriverState.removeLockedDriverState();
     }
-    return 0;
+    destroy();
   }
 
   // is usually called after close() to commit or rollback a query and end the 
driver life cycle.
@@ -2571,19 +2554,6 @@ public class Driver implements IDriver {
     this.operationId = opId;
   }
 
-  /**
-   * Resets QueryState to get new queryId on Driver reuse.
-   */
-
-  @Override
-  public void resetQueryState() {
-    // Note: Driver cleanup for reuse at this point is not very clear. The 
assumption here is that
-    // repeated compile/execute calls create new contexts, plan, etc., so we 
don't need to worry
-    // propagating queryState into those existing fields, or resetting them.
-    releaseResources();
-    this.queryState = getNewQueryState(queryState.getConf());
-  }
-
   public QueryState getQueryState() {
     return queryState;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java 
b/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java
index 49d2bf5..60e8de8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java
@@ -19,17 +19,12 @@
 package org.apache.hadoop.hive.ql;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
  * Constructs a driver for ql clients
  */
 public class DriverFactory {
 
-  public static IDriver newDriver(HiveConf conf) {
-    return newDriver(getNewQueryState(conf), null, null);
-  }
-
   enum ExecutionStrategy {
     none {
       @Override
@@ -41,22 +36,16 @@ public class DriverFactory {
     abstract IDriver build(QueryState queryState, String userName, QueryInfo 
queryInfo);
   }
 
+  public static IDriver newDriver(HiveConf conf) {
+    return newDriver(getNewQueryState(conf), null, null);
+  }
+
   public static IDriver newDriver(QueryState queryState, String userName, 
QueryInfo queryInfo) {
     ExecutionStrategy strategy = ExecutionStrategy.none;
     return strategy.build(queryState, userName, queryInfo);
   }
 
   private static QueryState getNewQueryState(HiveConf conf) {
-    // FIXME: isolate hiveConf used for a single query
     return new 
QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build();
   }
-
-  // FIXME: remove this method ; and use the conf at the callsite...
-  @Deprecated
-  public static IDriver newDriver() {
-    // only CLIDriver enter at this point
-    HiveConf conf = (SessionState.get() != null) ? 
SessionState.get().getConf() : new HiveConf();
-    return newDriver(conf);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
index d4494cc..9f13fa8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql;
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
@@ -60,8 +61,9 @@ public interface IDriver extends CommandProcessor {
   void resetFetch() throws IOException;
 
   // close&destroy is used in seq coupling most of the time - the difference 
is either not clear; or not relevant - remove?
-  int close();
+  @Override
+  void close();
   void destroy();
 
-  void resetQueryState();
+  HiveConf getConf();
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java 
b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
index d8d19e8..706c9ff 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql;
 
 import java.util.Map;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
@@ -106,7 +105,7 @@ public class QueryState {
    */
   public static class Builder {
     private Map<String, String> confOverlay = null;
-    private boolean runAsync = false;
+    private boolean isolated = true;
     private boolean generateNewQueryId = false;
     private HiveConf hiveConf = null;
     private LineageState lineageState = null;
@@ -118,17 +117,6 @@ public class QueryState {
     }
 
     /**
-     * Set this to true if the configuration should be detached from the 
original config. If not
-     * set the default value is false.
-     * @param runAsync If the configuration should be detached
-     * @return The builder
-     */
-    public Builder withRunAsync(boolean runAsync) {
-      this.runAsync = runAsync;
-      return this;
-    }
-
-    /**
      * Set this if there are specific configuration values which should be 
added to the original
      * config. If at least one value is set, then the configuration will be 
detached from the
      * original one.
@@ -141,6 +129,16 @@ public class QueryState {
     }
 
     /**
+     * Disable configuration isolation.
+     *
+     * For internal use / testing purposes only.
+     */
+    public Builder nonIsolated() {
+      isolated = false;
+      return this;
+    }
+
+    /**
      * Set this to true if new queryId should be generated, otherwise the 
original one will be kept.
      * If not set the default value is false.
      * @param generateNewQueryId If new queryId should be generated
@@ -182,14 +180,17 @@ public class QueryState {
      * @return The generated QueryState object
      */
     public QueryState build() {
-      HiveConf queryConf = hiveConf;
-
-      if (queryConf == null) {
-        // Generate a new conf if necessary
-        queryConf = new HiveConf();
-      } else if (runAsync || (confOverlay != null && !confOverlay.isEmpty())) {
-        // Detach the original conf if necessary
-        queryConf = new HiveConf(queryConf);
+      HiveConf queryConf;
+
+      if (isolated) {
+        // isolate query conf
+        if (hiveConf == null) {
+          queryConf = new HiveConf();
+        } else {
+          queryConf = new HiveConf(hiveConf);
+        }
+      } else {
+        queryConf = hiveConf;
       }
 
       // Set the specific parameters if needed
@@ -206,7 +207,13 @@ public class QueryState {
 
       // Generate the new queryId if needed
       if (generateNewQueryId) {
-        queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, 
QueryPlan.makeQueryId());
+        String queryId = QueryPlan.makeQueryId();
+        queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
+        // FIXME: druid storage handler relies on query.id to maintain some 
staging directories
+        // expose queryid to session level
+        if (hiveConf != null) {
+          hiveConf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
+        }
       }
 
       QueryState queryState = new QueryState(queryConf);

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/hooks/HooksLoader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HooksLoader.java 
b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HooksLoader.java
index 5a370e8..8c19338 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HooksLoader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HooksLoader.java
@@ -58,8 +58,8 @@ public class HooksLoader {
    * @throws IllegalAccessException if the specified class names could not be 
accessed
    * @throws InstantiationException if the specified class names could not be 
instantiated
    */
-  public final <T extends Hook> List<T> getHooks(HiveConf.ConfVars 
hookConfVar, SessionState.LogHelper console, Class<?> clazz)
-      throws IllegalAccessException, InstantiationException, 
ClassNotFoundException {
+  public final <T extends Hook> List<T> getHooks(HiveConf.ConfVars 
hookConfVar, SessionState.LogHelper console,
+      Class<T> clazz) throws IllegalAccessException, InstantiationException, 
ClassNotFoundException {
     try {
       return getHooks(hookConfVar, clazz);
     } catch (ClassNotFoundException e) {
@@ -85,8 +85,8 @@ public class HooksLoader {
    * @throws IllegalAccessException if the specified class names could not be 
accessed
    * @throws InstantiationException if the specified class names could not be 
instantiated
    */
-  public <T extends Hook> List<T> getHooks(HiveConf.ConfVars hookConfVar, 
Class<?> clazz)
-          throws InstantiationException, IllegalAccessException, 
ClassNotFoundException {
+  public <T extends Hook> List<T> getHooks(HiveConf.ConfVars hookConfVar, 
Class<T> clazz)
+      throws InstantiationException, IllegalAccessException, 
ClassNotFoundException {
     String csHooks = conf.getVar(hookConfVar);
     ImmutableList.Builder<T> hooks = ImmutableList.builder();
     if (csHooks == null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java 
b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
index cf8bc7f..fca6408 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
@@ -47,6 +47,8 @@ class DummyTxnManager extends HiveTxnManagerImpl {
 
   private HiveLockManager lockMgr;
 
+  private HiveLockManagerCtx lockManagerCtx;
+
   @Override
   public long openTxn(Context ctx, String user) throws LockException {
     // No-op
@@ -81,7 +83,8 @@ class DummyTxnManager extends HiveTxnManagerImpl {
           LOG.info("Creating lock manager of type " + lockMgrName);
           lockMgr = (HiveLockManager)ReflectionUtils.newInstance(
               conf.getClassByName(lockMgrName), conf);
-          lockMgr.setContext(new HiveLockManagerCtx(conf));
+          lockManagerCtx = new HiveLockManagerCtx(conf);
+          lockMgr.setContext(lockManagerCtx);
         } catch (Exception e) {
           // set hiveLockMgr to null just in case this invalid manager got set 
to
           // next query's ctx.
@@ -103,6 +106,7 @@ class DummyTxnManager extends HiveTxnManagerImpl {
     }
     // Force a re-read of the configuration file.  This is done because
     // different queries in the session may be using the same lock manager.
+    lockManagerCtx.setConf(conf);
     lockMgr.refresh();
     return lockMgr;
   }
@@ -119,7 +123,9 @@ class DummyTxnManager extends HiveTxnManagerImpl {
 
     // If the lock manager is still null, then it means we aren't using a
     // lock manager
-    if (lockMgr == null) return;
+    if (lockMgr == null) {
+      return;
+    }
 
     List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();
 
@@ -234,6 +240,7 @@ class DummyTxnManager extends HiveTxnManagerImpl {
   }
 
 
+  @Override
   protected void destruct() {
     if (lockMgr != null) {
       try {

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java 
b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java
index d750e77..c8cafa2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java
@@ -21,6 +21,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver.LockedDriverState;
@@ -42,12 +44,22 @@ import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
  * transaction managers need to implement but that we don't want to expose to
  * outside.
  */
-abstract class HiveTxnManagerImpl implements HiveTxnManager {
+abstract class HiveTxnManagerImpl implements HiveTxnManager, Configurable {
 
   protected HiveConf conf;
 
   void setHiveConf(HiveConf c) {
-    conf = c;
+    setConf(c);
+  }
+
+  @Override
+  public void setConf(Configuration c) {
+    conf = (HiveConf) c;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
   }
 
   abstract protected void destruct();

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 7ed9fe4..9d77f49 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -7510,6 +7510,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
     backwards incompatible.
     */
     conf.set(AcidUtils.CONF_ACID_KEY, "true");
+    SessionState.get().getConf().set(AcidUtils.CONF_ACID_KEY, "true");
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
index 5fcbd69..d228682 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
@@ -75,4 +75,8 @@ public class AddResourceProcessor implements CommandProcessor 
{
     return new CommandProcessorResponse(0);
   }
 
+  @Override
+  public void close() throws Exception {
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
index c753264..4d73181 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
@@ -18,6 +18,6 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
-public interface CommandProcessor {
+public interface CommandProcessor extends AutoCloseable {
   CommandProcessorResponse run(String command);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
index dcf8d31..74a34b3 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
@@ -21,18 +21,15 @@ package org.apache.hadoop.hive.ql.processors;
 import static org.apache.commons.lang.StringUtils.isBlank;
 
 import java.sql.SQLException;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Map;
 import java.util.Set;
 
+import javax.annotation.Nonnull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverFactory;
-import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -46,13 +43,6 @@ public final class CommandProcessorFactory {
     // prevent instantiation
   }
 
-  private static final Map<HiveConf, IDriver> mapDrivers = 
Collections.synchronizedMap(new HashMap<HiveConf, IDriver>());
-
-  public static CommandProcessor get(String cmd)
-      throws SQLException {
-    return get(new String[]{cmd}, null);
-  }
-
   public static CommandProcessor getForHiveCommand(String[] cmd, HiveConf conf)
     throws SQLException {
     return getForHiveCommandInternal(cmd, conf, false);
@@ -111,8 +101,8 @@ public final class CommandProcessorFactory {
   }
 
   static Logger LOG = LoggerFactory.getLogger(CommandProcessorFactory.class);
-  public static CommandProcessor get(String[] cmd, HiveConf conf)
-      throws SQLException {
+
+  public static CommandProcessor get(String[] cmd, @Nonnull HiveConf conf) 
throws SQLException {
     CommandProcessor result = getForHiveCommand(cmd, conf);
     if (result != null) {
       return result;
@@ -120,27 +110,7 @@ public final class CommandProcessorFactory {
     if (isBlank(cmd[0])) {
       return null;
     } else {
-      if (conf == null) {
-        return new Driver();
-      }
-      IDriver drv = mapDrivers.get(conf);
-      if (drv == null) {
-        // FIXME: why this method didn't use the conf constructor?
-        drv = DriverFactory.newDriver();
-        mapDrivers.put(conf, drv);
-      } else {
-        drv.resetQueryState();
-      }
-      return drv;
+      return DriverFactory.newDriver(conf);
     }
   }
-
-  public static void clean(HiveConf conf) {
-    IDriver drv = mapDrivers.get(conf);
-    if (drv != null) {
-      drv.destroy();
-    }
-
-    mapDrivers.remove(conf);
-  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
index fad4f52..7b96b33 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
@@ -309,4 +309,8 @@ public class CompileProcessor implements CommandProcessor {
       super(s);
     }
   }
+
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
index d1202f9..d2a864a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
@@ -176,4 +176,8 @@ public class CryptoProcessor implements CommandProcessor {
 
     writeTestOutput("Encryption key deleted: '" + keyName + "'");
   }
+
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
index 54a7d4b..bac020d 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
@@ -72,4 +72,8 @@ public class DeleteResourceProcessor implements 
CommandProcessor {
 
     return new CommandProcessorResponse(0);
   }
+
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
index 62a1725..0b334e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
@@ -114,7 +114,7 @@ public class DfsProcessor implements CommandProcessor {
 
       switch(x) {
         case ' ':
-          if ((int) y == 0) {
+          if (y == 0) {
             String str = command.substring(start, i).trim();
             if (!str.equals("")) {
               paras.add(str);
@@ -123,7 +123,7 @@ public class DfsProcessor implements CommandProcessor {
           }
           break;
         case '"':
-          if ((int) y == 0) {
+          if (y == 0) {
             y = x;
             start = i + 1;
           } else if ('"' == y) {
@@ -133,7 +133,7 @@ public class DfsProcessor implements CommandProcessor {
           }
           break;
         case '\'':
-          if ((int) y == 0) {
+          if (y == 0) {
             y = x;
             start = i + 1;
           } else if ('\'' == y) {
@@ -150,7 +150,7 @@ public class DfsProcessor implements CommandProcessor {
       }
     }
 
-    if ((int) y != 0) {
+    if (y != 0) {
       String message = "Syntax error on hadoop options: dfs " + command;
       console.printError(message);
       throw new HiveException(message);
@@ -159,4 +159,7 @@ public class DfsProcessor implements CommandProcessor {
     return paras.toArray(new String[paras.size()]);
   }
 
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
index 91a6aba..afd604a 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
@@ -62,4 +62,8 @@ public class ListResourceProcessor implements 
CommandProcessor {
     }
     return new CommandProcessorResponse(0, null, null, SCHEMA);
   }
+
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
index 4caab91..bcbc030 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
@@ -41,4 +41,8 @@ public class ReloadProcessor implements CommandProcessor{
     }
     return new CommandProcessorResponse(0);
   }
+
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
index ca39ff9..ce65ccd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
@@ -156,4 +156,8 @@ public class ResetProcessor implements CommandProcessor {
     }
     return confVars;
   }
+
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
index 1ff4b3c..db0fef1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
@@ -433,4 +433,7 @@ public class SetProcessor implements CommandProcessor {
     return sch;
   }
 
+  @Override
+  public void close() throws Exception {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java 
b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
index 26c6700..e5ebf9a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
 import org.apache.hadoop.hive.metastore.txn.TxnUtils;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
@@ -237,7 +238,7 @@ public class Worker extends CompactorThread {
     private final HiveConf conf;
     private final String userName;
     private final CompactionInfo ci;
-      
+
     private StatsUpdater(CompactionInfo ci, List<String> columnListForStats,
                          HiveConf conf, String userName) {
       this.conf = conf;
@@ -287,7 +288,7 @@ public class Worker extends CompactorThread {
       }
       sb.setLength(sb.length() - 1); //remove trailing ,
       LOG.info("running '" + sb.toString() + "'");
-      Driver d = new Driver(conf, userName);
+      Driver d = new Driver(new 
QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(), 
userName);
       SessionState localSession = null;
       if(SessionState.get() == null) {
          localSession = SessionState.start(new SessionState(conf));

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
index 1e83799..128b66c 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
@@ -61,6 +61,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -242,7 +243,7 @@ public class GenericUDTFGetSplits extends GenericUDTF {
     // So initialize the new Driver with a new TxnManager so that it does not 
use the
     // Session TxnManager that is already in use.
     HiveTxnManager txnManager = 
TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
-    Driver driver = new Driver(conf, txnManager);
+    Driver driver = new Driver(new 
QueryState.Builder().withHiveConf(conf).nonIsolated().build(), null, null, 
txnManager);
     DriverCleanup driverCleanup = new DriverCleanup(driver, txnManager, 
splitsAppId.toString());
     boolean needsCleanup = true;
     try {
@@ -267,8 +268,7 @@ public class GenericUDTFGetSplits extends GenericUDTF {
 
         String ctas = "create temporary table " + tableName + " as " + query;
         LOG.info("Materializing the query for LLAPIF; CTAS: " + ctas);
-
-        driver.resetQueryState();
+        driver.releaseResources();
         HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, originalMode);
         cpr = driver.run(ctas, false);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
index 048215a..bab6d5e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
@@ -138,8 +138,9 @@ public class TestTxnCommands2 {
     if (!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
       throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
     }
-    SessionState.start(new SessionState(hiveConf));
-    d = new Driver(hiveConf);
+    SessionState ss = SessionState.start(hiveConf);
+    ss.applyAuthorizationPolicy();
+    d = new Driver(new 
QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build(), null);
     d.setMaxRows(10000);
     dropTables();
     runStatementOnDriver("create table " + Table.ACIDTBL + "(a int, b int) 
clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES 
(" + tableProperties + ")");

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
index 93074e9..12083fd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -92,8 +91,9 @@ public abstract class TxnCommandsBaseForTests {
     if (!(new File(getWarehouseDir()).mkdirs())) {
       throw new RuntimeException("Could not create " + getWarehouseDir());
     }
-    SessionState.start(new SessionState(hiveConf));
-    d = new Driver(hiveConf);
+    SessionState ss = SessionState.start(hiveConf);
+    ss.applyAuthorizationPolicy();
+    d = new Driver(new 
QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build(), null);
     d.setMaxRows(10000);
     dropTables();
     runStatementOnDriver("create table " + Table.ACIDTBL + "(a int, b int) 
clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES 
('transactional'='true')");
@@ -136,7 +136,7 @@ public abstract class TxnCommandsBaseForTests {
   String makeValuesClause(int[][] rows) {
     return TestTxnCommands2.makeValuesClause(rows);
   }
-  
+
   void runWorker(HiveConf hiveConf) throws MetaException {
     TestTxnCommands2.runWorker(hiveConf);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java 
b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
index 635a357..bbc2453 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
@@ -413,7 +413,7 @@ public class TestOperators extends TestCase {
         "inputformat 
'org.apache.hadoop.hive.ql.exec.TestOperators$CustomInFmt' " +
         "outputformat 
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' " +
         "tblproperties ('myprop1'='val1', 'myprop2' = 'val2')";
-    Driver driver = new Driver();
+    Driver driver = new Driver(conf);
     CommandProcessorResponse response = driver.run(cmd);
     assertEquals(0, response.getResponseCode());
     List<Object> result = new ArrayList<Object>();

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java 
b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
index 06a96d5..492b63d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
@@ -22,6 +22,7 @@ import com.google.common.collect.Lists;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.junit.BeforeClass;
@@ -135,7 +136,8 @@ public class TestQueryHooks {
             Lists.newArrayList(mockHook));
 
     SessionState.start(conf);
-    Driver driver = new Driver(conf, mockLoader);
+
+    Driver driver = new Driver(new 
QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(), 
null, mockLoader, null, null);
     return driver;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
----------------------------------------------------------------------
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java 
b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
index 71d960f..7650917 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
@@ -63,17 +64,17 @@ import java.util.Map;
  * code path that CLI would but with the advantage that you can create a 2nd 
HiveTxnManager and then
  * simulate interleaved transactional/locking operations but all from within a 
single thread.
 * The latter not only controls concurrency precisely but is the only way to 
run in UT env with DerbyDB.
- * 
+ *
  * A slightly different (and simpler) approach is to use "start 
transaction/(commit/rollback)"
  * command with the Driver.run().  This allows you to "see" the state of the 
Lock Manager after
  * each statement and can also simulate concurrent (but very controlled) work 
but w/o forking any
  * threads.  The limitation here is that not all statements are allowed in an 
explicit transaction.
  * For example, "drop table foo".  This approach will also cause the query to 
execute which will
 * make tests slower but will exercise the code path that is much closer to 
the actual user calls.
- * 
+ *
 * In either approach, each logical "session" should use its own Transaction 
Manager.  This requires
  * using {@link #swapTxnManager(HiveTxnManager)} since in the SessionState the 
TM is associated with
- * each thread.  
+ * each thread.
  */
 public class TestDbTxnManager2 {
   private static final Logger LOG = 
LoggerFactory.getLogger(TestDbTxnManager2.class);
@@ -94,7 +95,7 @@ public class TestDbTxnManager2 {
   public void setUp() throws Exception {
     SessionState.start(conf);
     ctx = new Context(conf);
-    driver = new Driver(conf);
+    driver = new Driver(new 
QueryState.Builder().withHiveConf(conf).nonIsolated().build(), null);
     TxnDbUtil.cleanDb(conf);
     TxnDbUtil.prepDb(conf);
     SessionState ss = SessionState.get();
@@ -663,7 +664,7 @@ public class TestDbTxnManager2 {
     checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "default", "acidPart", 
"p=1", locks);
     checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "_dummy_database", 
"_dummy_table", null, locks);
     txnMgr.rollbackTxn();
-    
+
     cpr = driver.compileAndRespond("update acidPart set b = 17 where a = 1");
     checkCmdOnDriver(cpr);
     lockState = ((DbTxnManager) txnMgr).acquireLocks(driver.getPlan(), ctx, 
"Practical", false);

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java
----------------------------------------------------------------------
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java 
b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java
index 2f5fc2f..8f7505d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java
@@ -59,6 +59,9 @@ public class TestDummyTxnManager {
   HiveLockManager mockLockManager;
 
   @Mock
+  HiveLockManagerCtx mockLockManagerCtx;
+
+  @Mock
   QueryPlan mockQueryPlan;
 
   @Before
@@ -73,11 +76,17 @@ public class TestDummyTxnManager {
 
     txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
     Assert.assertTrue(txnMgr instanceof DummyTxnManager);
+
     // Use reflection to set LockManager since creating the object using the
    // reflection in DummyTxnManager won't take Mocked object
     Field field = DummyTxnManager.class.getDeclaredField("lockMgr");
     field.setAccessible(true);
     field.set(txnMgr, mockLockManager);
+
+    Field field2 = DummyTxnManager.class.getDeclaredField("lockManagerCtx");
+    field2.setAccessible(true);
+    field2.set(txnMgr, mockLockManagerCtx);
+
   }
 
   @After

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/queries/clientpositive/driver_conf_isolation.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/driver_conf_isolation.q 
b/ql/src/test/queries/clientpositive/driver_conf_isolation.q
new file mode 100644
index 0000000..3a8356c
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/driver_conf_isolation.q
@@ -0,0 +1,5 @@
+set hive.mapred.mode=strict;
+select "${hiveconf:hive.mapred.mode}";
+create table t (a int);
+analyze table t compute statistics;
+select "${hiveconf:hive.mapred.mode}";

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/queries/clientpositive/special_character_in_tabnames_1.q
----------------------------------------------------------------------
diff --git 
a/ql/src/test/queries/clientpositive/special_character_in_tabnames_1.q 
b/ql/src/test/queries/clientpositive/special_character_in_tabnames_1.q
index c017172..adc23e9 100644
--- a/ql/src/test/queries/clientpositive/special_character_in_tabnames_1.q
+++ b/ql/src/test/queries/clientpositive/special_character_in_tabnames_1.q
@@ -2,6 +2,7 @@ set hive.cbo.enable=true;
 set hive.exec.check.crossproducts=false;
 set hive.stats.fetch.column.stats=true;
 set hive.auto.convert.join=false;
+set hive.strict.checks.cartesian.product=false;
 
 -- SORT_QUERY_RESULTS
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/results/clientpositive/driver_conf_isolation.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/driver_conf_isolation.q.out 
b/ql/src/test/results/clientpositive/driver_conf_isolation.q.out
new file mode 100644
index 0000000..298c24f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/driver_conf_isolation.q.out
@@ -0,0 +1,34 @@
+PREHOOK: query: select "strict"
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select "strict"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+strict
+PREHOOK: query: create table t (a int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t
+POSTHOOK: query: create table t (a int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t
+PREHOOK: query: analyze table t compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t
+PREHOOK: Output: default@t
+POSTHOOK: query: analyze table t compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t
+POSTHOOK: Output: default@t
+PREHOOK: query: select "strict"
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select "strict"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+strict

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/ql/src/test/results/clientpositive/input39.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input39.q.out 
b/ql/src/test/results/clientpositive/input39.q.out
index 3000404..6a5b82d 100644
--- a/ql/src/test/results/clientpositive/input39.q.out
+++ b/ql/src/test/results/clientpositive/input39.q.out
@@ -165,4 +165,4 @@ POSTHOOK: Input: default@t2@ds=1
 #### A masked pattern was here ####
 18
 mapreduce.framework.name=yarn
-mapreduce.jobtracker.address=local
+mapreduce.jobtracker.address=localhost:58

http://git-wip-us.apache.org/repos/asf/hive/blob/f5c08a95/service/src/java/org/apache/hive/service/cli/operation/Operation.java
----------------------------------------------------------------------
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/Operation.java 
b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 2ef1479..51e2165 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -80,14 +80,9 @@ public abstract class Operation {
   protected Operation(HiveSession parentSession, OperationType opType) {
     this(parentSession, null, opType);
   }
-  
-  protected Operation(HiveSession parentSession, Map<String, String> 
confOverlay,
-      OperationType opType) {
-    this(parentSession, confOverlay, opType, false);
-  }
 
   protected Operation(HiveSession parentSession,
-      Map<String, String> confOverlay, OperationType opType, boolean 
isAsyncQueryState) {
+      Map<String, String> confOverlay, OperationType opType) {
     this.parentSession = parentSession;
     this.opHandle = new OperationHandle(opType, 
parentSession.getProtocolVersion());
     beginTime = System.currentTimeMillis();
@@ -99,7 +94,6 @@ public abstract class Operation {
         MetricsConstant.COMPLETED_OPERATION_PREFIX, state);
     queryState = new QueryState.Builder()
                      .withConfOverlay(confOverlay)
-                     .withRunAsync(isAsyncQueryState)
                      .withGenerateNewQueryId(true)
                      .withHiveConf(parentSession.getHiveConf())
                      .build();

Reply via email to