Repository: spark
Updated Branches:
  refs/heads/branch-2.3 88003f02c -> 2f60df09d


[SPARK-24246][SQL] Improve AnalysisException by setting the cause when it's available

## What changes were proposed in this pull request?

If an exception is caught during analysis, it's better to set it as the cause of the resulting AnalysisException, since the underlying exception may contain useful debugging information.
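Below is a small, self-contained sketch (plain Scala, not Spark code; `AnalysisError`, `lookupTable`, and the table name are hypothetical stand-ins) of why chaining the caught exception as the cause helps: its message and stack trace stay reachable via `getCause` instead of being discarded.

```scala
// Minimal sketch of cause chaining, mirroring the `cause = Some(cause)` pattern
// in the diff below.
object CauseChainingSketch {
  // Hypothetical stand-in for AnalysisException with an optional cause.
  class AnalysisError(message: String, cause: Option[Throwable] = None)
    extends Exception(message, cause.orNull)

  // Hypothetical lookup that fails the way a missing-table lookup would.
  def lookupTable(name: String): Nothing =
    throw new NoSuchElementException(s"Table $name not found in catalog")

  def main(args: Array[String]): Unit = {
    try {
      try lookupTable("db.missing_table") catch {
        case e: NoSuchElementException =>
          // Without the cause, the original exception is dropped; here it is chained.
          throw new AnalysisError("Table or view not found: db.missing_table", Some(e))
      }
    } catch {
      case e: AnalysisError =>
        println(e.getMessage)           // analysis-level message
        println(e.getCause.getMessage)  // underlying debug information is preserved
    }
  }
}
```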

## How was this patch tested?

Jenkins

Author: Shixiong Zhu <zsxw...@gmail.com>

Closes #21297 from zsxwing/SPARK-24246.

(cherry picked from commit c26f673252c2cbbccf8c395ba6d4ab80c098d60e)
Signed-off-by: gatorsmile <gatorsm...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2f60df09
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2f60df09
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2f60df09

Branch: refs/heads/branch-2.3
Commit: 2f60df09dc1bb65da254e00abe8463122e8c77cf
Parents: 88003f0
Author: Shixiong Zhu <zsxw...@gmail.com>
Authored: Mon May 14 11:37:57 2018 -0700
Committer: gatorsmile <gatorsm...@gmail.com>
Committed: Mon May 14 11:38:09 2018 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/catalyst/analysis/Analyzer.scala      | 6 +++---
 .../spark/sql/catalyst/analysis/ResolveInlineTables.scala      | 2 +-
 .../scala/org/apache/spark/sql/catalyst/analysis/package.scala | 5 +++++
 .../org/apache/spark/sql/execution/datasources/rules.scala     | 2 +-
 4 files changed, 10 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2f60df09/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 9cc928c..8597d83 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -661,13 +661,13 @@ class Analyzer(
       try {
         catalog.lookupRelation(tableIdentWithDb)
       } catch {
-        case _: NoSuchTableException =>
-          u.failAnalysis(s"Table or view not found: ${tableIdentWithDb.unquotedString}")
+        case e: NoSuchTableException =>
+          u.failAnalysis(s"Table or view not found: ${tableIdentWithDb.unquotedString}", e)
         // If the database is defined and that database is not found, throw an AnalysisException.
         // Note that if the database is not defined, it is possible we are looking up a temp view.
         case e: NoSuchDatabaseException =>
           u.failAnalysis(s"Table or view not found: ${tableIdentWithDb.unquotedString}, the " +
-            s"database ${e.db} doesn't exist.")
+            s"database ${e.db} doesn't exist.", e)
       }
     }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/2f60df09/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
index f2df3e1..71ed754 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
@@ -103,7 +103,7 @@ case class ResolveInlineTables(conf: SQLConf) extends Rule[LogicalPlan] with Cas
           castedExpr.eval()
         } catch {
           case NonFatal(ex) =>
-            table.failAnalysis(s"failed to evaluate expression ${e.sql}: ${ex.getMessage}")
+            table.failAnalysis(s"failed to evaluate expression ${e.sql}: ${ex.getMessage}", ex)
         }
       })
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/2f60df09/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
index 7731336..354a3fa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
@@ -41,6 +41,11 @@ package object analysis {
     def failAnalysis(msg: String): Nothing = {
       throw new AnalysisException(msg, t.origin.line, t.origin.startPosition)
     }
+
+    /** Fails the analysis at the point where a specific tree node was parsed. */
+    def failAnalysis(msg: String, cause: Throwable): Nothing = {
+      throw new AnalysisException(msg, t.origin.line, t.origin.startPosition, cause = Some(cause))
+    }
   }
 
   /** Catches any AnalysisExceptions thrown by `f` and attaches `t`'s position if any. */

http://git-wip-us.apache.org/repos/asf/spark/blob/2f60df09/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
index 0dea767..cab0025 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
@@ -61,7 +61,7 @@ class ResolveSQLOnFile(sparkSession: SparkSession) extends Rule[LogicalPlan] {
         case _: ClassNotFoundException => u
         case e: Exception =>
           // the provider is valid, but failed to create a logical plan
-          u.failAnalysis(e.getMessage)
+          u.failAnalysis(e.getMessage, e)
       }
   }
 }

