Repository: hive
Updated Branches:
  refs/heads/master 539896482 -> 10944ee34


HIVE-16962: Better error msg for Hive on Spark in case user cancels query and 
closes session (reviewed by Chao)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/10944ee3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/10944ee3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/10944ee3

Branch: refs/heads/master
Commit: 10944ee34a39efc0503ca917d1153751e1d495d2
Parents: 5398964
Author: Xuefu Zhang <xu...@uber.com>
Authored: Thu Jun 29 10:01:05 2017 -0700
Committer: Xuefu Zhang <xu...@uber.com>
Committed: Thu Jun 29 10:01:05 2017 -0700

----------------------------------------------------------------------
 .../hive/ql/exec/spark/session/SparkSessionImpl.java  |  9 ++++++++-
 .../org/apache/hive/spark/client/SparkClientImpl.java | 14 ++++++++++----
 2 files changed, 18 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/10944ee3/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
index 51c6715..8224ef9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
@@ -56,13 +56,18 @@ public class SparkSessionImpl implements SparkSession {
 
   @Override
   public void open(HiveConf conf) throws HiveException {
+    LOG.info("Trying to open Spark session {}", sessionId);
     this.conf = conf;
     isOpen = true;
     try {
       hiveSparkClient = HiveSparkClientFactory.createHiveSparkClient(conf);
     } catch (Throwable e) {
-      throw new HiveException("Failed to create spark client.", e);
+      // It's possible that user session is closed while creating Spark client.
+      String msg = isOpen ? "Failed to create Spark client for Spark session " + sessionId :
+        "Spark Session " + sessionId + " is closed before Spark client is created";
+      throw new HiveException(msg, e);
     }
+    LOG.info("Spark session {} is successfully opened", sessionId);
   }
 
   @Override
@@ -121,10 +126,12 @@ public class SparkSessionImpl implements SparkSession {
 
   @Override
   public void close() {
+    LOG.info("Trying to close Spark session {}", sessionId);
     isOpen = false;
     if (hiveSparkClient != null) {
       try {
         hiveSparkClient.close();
+        LOG.info("Spark session {} is successfully closed", sessionId);
         cleanScratchDir();
       } catch (IOException e) {
         LOG.error("Failed to close spark session (" + sessionId + ").", e);

http://git-wip-us.apache.org/repos/asf/hive/blob/10944ee3/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
----------------------------------------------------------------------
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
index e40aa6b..bf7e8db 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
@@ -107,19 +107,25 @@ class SparkClientImpl implements SparkClient {
       // The RPC server will take care of timeouts here.
       this.driverRpc = rpcServer.registerClient(clientId, secret, protocol).get();
     } catch (Throwable e) {
+      String errorMsg = null;
       if (e.getCause() instanceof TimeoutException) {
-        LOG.error("Timed out waiting for client to connect.\nPossible reasons include network " +
+        errorMsg = "Timed out waiting for client to connect.\nPossible reasons include network " +
             "issues, errors in remote driver or the cluster has no available resources, etc." +
-            "\nPlease check YARN or Spark driver's logs for further information.", e);
+            "\nPlease check YARN or Spark driver's logs for further information.";
+      } else if (e.getCause() instanceof InterruptedException) {
+        errorMsg = "Interruption occurred while waiting for client to connect.\nPossibly the Spark session is closed " +
+            "such as in case of query cancellation." +
+            "\nPlease refer to HiveServer2 logs for further information.";
       } else {
-        LOG.error("Error while waiting for client to connect.", e);
+        errorMsg = "Error while waiting for client to connect.";
       }
+      LOG.error(errorMsg, e);
       driverThread.interrupt();
       try {
         driverThread.join();
       } catch (InterruptedException ie) {
         // Give up.
-        LOG.debug("Interrupted before driver thread was finished.");
+        LOG.warn("Interrupted before driver thread was finished.", ie);
       }
       throw Throwables.propagate(e);
     }

Reply via email to