Repository: spark
Updated Branches:
  refs/heads/branch-2.1 8460b0905 -> 6696ad0e8


[SPARK-20439][SQL][BACKPORT-2.1] Fix Catalog API listTables and getTable when 
failed to fetch table metadata

### What changes were proposed in this pull request?

This PR is to backport https://github.com/apache/spark/pull/17730 to Spark 2.1
--- --
`spark.catalog.listTables` and `spark.catalog.getTable` do not work if we are 
unable to retrieve table metadata due to any reason (e.g., the table serde class 
is not accessible or the table type is not accepted by Spark SQL). After this 
PR, the APIs still return the corresponding Table without the description and 
tableType.

### How was this patch tested?
Added a test case

Author: Xiao Li <gatorsm...@gmail.com>

Closes #17760 from gatorsmile/backport-17730.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6696ad0e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6696ad0e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6696ad0e

Branch: refs/heads/branch-2.1
Commit: 6696ad0e8ce196a27e2908108f6e7eb7661affc4
Parents: 8460b09
Author: Xiao Li <gatorsm...@gmail.com>
Authored: Wed Apr 26 11:39:10 2017 +0800
Committer: Wenchen Fan <wenc...@databricks.com>
Committed: Wed Apr 26 11:39:10 2017 +0800

----------------------------------------------------------------------
 .../apache/spark/sql/internal/CatalogImpl.scala | 28 ++++++++++++++++----
 .../spark/sql/hive/execution/HiveDDLSuite.scala |  8 ++++++
 2 files changed, 31 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/6696ad0e/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index 9d0b214..c3c3513 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.internal
 
 import scala.collection.JavaConverters._
 import scala.reflect.runtime.universe.TypeTag
+import scala.util.control.NonFatal
 
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql._
@@ -99,14 +100,27 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
     CatalogImpl.makeDataset(tables, sparkSession)
   }
 
+  /**
+   * Returns a Table for the given table/view or temporary view.
+   *
+   * Note that this function requires the table already exists in the Catalog.
+   *
+   * If the table metadata retrieval failed due to any reason (e.g., table serde class
+   * is not accessible or the table type is not accepted by Spark SQL), this function
+   * still returns the corresponding Table without the description and tableType)
+   */
   private def makeTable(tableIdent: TableIdentifier): Table = {
-    val metadata = sessionCatalog.getTempViewOrPermanentTableMetadata(tableIdent)
+    val metadata = try {
+      Some(sessionCatalog.getTempViewOrPermanentTableMetadata(tableIdent))
+    } catch {
+      case NonFatal(_) => None
+    }
     val isTemp = sessionCatalog.isTemporaryTable(tableIdent)
     new Table(
       name = tableIdent.table,
-      database = metadata.identifier.database.orNull,
-      description = metadata.comment.orNull,
-      tableType = if (isTemp) "TEMPORARY" else metadata.tableType.name,
+      database = metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull,
+      description = metadata.map(_.comment.orNull).orNull,
+      tableType = if (isTemp) "TEMPORARY" else metadata.map(_.tableType.name).orNull,
       isTemporary = isTemp)
   }
 
@@ -197,7 +211,11 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
    * `AnalysisException` when no `Table` can be found.
    */
   override def getTable(dbName: String, tableName: String): Table = {
-    makeTable(TableIdentifier(tableName, Option(dbName)))
+    if (tableExists(dbName, tableName)) {
+      makeTable(TableIdentifier(tableName, Option(dbName)))
+    } else {
+      throw new AnalysisException(s"Table or view '$tableName' not found in database '$dbName'")
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/6696ad0e/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 3b9437da..037ab47 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1060,6 +1060,14 @@ class HiveDDLSuite
          s"CREATE INDEX $indexName ON TABLE $tabName (a) AS 'COMPACT' WITH DEFERRED REBUILD")
        val indexTabName =
          spark.sessionState.catalog.listTables("default", s"*$indexName*").head.table
+
+        // Even if index tables exist, listTables and getTable APIs should still work
+        checkAnswer(
+          spark.catalog.listTables().toDF(),
+          Row(indexTabName, "default", null, null, false) ::
+            Row(tabName, "default", null, "MANAGED", false) :: Nil)
+        assert(spark.catalog.getTable("default", indexTabName).name === indexTabName)
+
        intercept[TableAlreadyExistsException] {
          sql(s"CREATE TABLE $indexTabName(b int)")
        }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to