spark git commit: [SPARK-14127][SQL] Makes 'DESC [EXTENDED|FORMATTED] <table>' support data source tables

Mon, 09 May 2016 10:54:12 -0700

Repository: spark
Updated Branches:
  refs/heads/master b1e01fd51 -> 671b382a8


[SPARK-14127][SQL] Makes 'DESC [EXTENDED|FORMATTED] <table>' support data 
source tables

## What changes were proposed in this pull request?

This is a follow-up of PR #12844. It makes the newly updated 
`DescribeTableCommand` support data source tables.

## How was this patch tested?

A test case is added to check `DESC [EXTENDED | FORMATTED] <table>` output.

Author: Cheng Lian <l...@databricks.com>

Closes #12934 from liancheng/spark-14127-desc-table-follow-up.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/671b382a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/671b382a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/671b382a

Branch: refs/heads/master
Commit: 671b382a80bc789d50f609783c7ba88fafc0c251
Parents: b1e01fd
Author: Cheng Lian <l...@databricks.com>
Authored: Mon May 9 10:53:32 2016 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Mon May 9 10:53:32 2016 -0700

----------------------------------------------------------------------
 .../spark/sql/execution/command/tables.scala    | 58 ++++++++++----------
 .../spark/sql/hive/execution/HiveDDLSuite.scala | 19 ++++++-
 2 files changed, 47 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/671b382a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 31c804f..954dcca 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.catalog.{CatalogColumn, CatalogRelation, 
CatalogTable, CatalogTableType}
+import org.apache.spark.sql.catalyst.catalog.{CatalogColumn, CatalogTable, 
CatalogTableType}
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.{Attribute, 
AttributeReference}
 import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan, 
UnaryNode}
@@ -288,45 +288,45 @@ case class DescribeTableCommand(table: TableIdentifier, 
isExtended: Boolean, isF
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val result = new ArrayBuffer[Row]
-    sparkSession.sessionState.catalog.lookupRelation(table) match {
-      case catalogRelation: CatalogRelation =>
-        if (isExtended) {
-          describeExtended(catalogRelation, result)
-        } else if (isFormatted) {
-          describeFormatted(catalogRelation, result)
-        } else {
-          describe(catalogRelation, result)
-        }
+    val catalog = sparkSession.sessionState.catalog
+
+    if (catalog.isTemporaryTable(table)) {
+      describeSchema(catalog.lookupRelation(table).schema, result)
+    } else {
+      val metadata = catalog.getTableMetadata(table)
 
-      case relation =>
-        describeSchema(relation.schema, result)
+      if (isExtended) {
+        describeExtended(metadata, result)
+      } else if (isFormatted) {
+        describeFormatted(metadata, result)
+      } else {
+        describe(metadata, result)
+      }
     }
 
     result
   }
 
   // Shows data columns and partitioned columns (if any)
-  private def describe(relation: CatalogRelation, buffer: ArrayBuffer[Row]): 
Unit = {
-    describeSchema(relation.catalogTable.schema, buffer)
+  private def describe(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
+    describeSchema(table.schema, buffer)
 
-    if (relation.catalogTable.partitionColumns.nonEmpty) {
+    if (table.partitionColumns.nonEmpty) {
       append(buffer, "# Partition Information", "", "")
       append(buffer, s"# ${output(0).name}", output(1).name, output(2).name)
-      describeSchema(relation.catalogTable.partitionColumns, buffer)
+      describeSchema(table.partitionColumns, buffer)
     }
   }
 
-  private def describeExtended(relation: CatalogRelation, buffer: 
ArrayBuffer[Row]): Unit = {
-    describe(relation, buffer)
+  private def describeExtended(table: CatalogTable, buffer: ArrayBuffer[Row]): 
Unit = {
+    describe(table, buffer)
 
     append(buffer, "", "", "")
-    append(buffer, "# Detailed Table Information", 
relation.catalogTable.toString, "")
+    append(buffer, "# Detailed Table Information", table.toString, "")
   }
 
-  private def describeFormatted(relation: CatalogRelation, buffer: 
ArrayBuffer[Row]): Unit = {
-    describe(relation, buffer)
-
-    val table = relation.catalogTable
+  private def describeFormatted(table: CatalogTable, buffer: 
ArrayBuffer[Row]): Unit = {
+    describe(table, buffer)
 
     append(buffer, "", "", "")
     append(buffer, "# Detailed Table Information", "", "")
@@ -358,17 +358,17 @@ case class DescribeTableCommand(table: TableIdentifier, 
isExtended: Boolean, isF
     }
   }
 
-  private def describeSchema(schema: StructType, buffer: ArrayBuffer[Row]): 
Unit = {
+  private def describeSchema(schema: Seq[CatalogColumn], buffer: 
ArrayBuffer[Row]): Unit = {
     schema.foreach { column =>
-      val comment =
-        if (column.metadata.contains("comment")) 
column.metadata.getString("comment") else ""
-      append(buffer, column.name, column.dataType.simpleString, comment)
+      append(buffer, column.name, column.dataType.toLowerCase, 
column.comment.orNull)
     }
   }
 
-  private def describeSchema(schema: Seq[CatalogColumn], buffer: 
ArrayBuffer[Row]): Unit = {
+  private def describeSchema(schema: StructType, buffer: ArrayBuffer[Row]): 
Unit = {
     schema.foreach { column =>
-      append(buffer, column.name, column.dataType.toLowerCase, 
column.comment.orNull)
+      val comment =
+        if (column.metadata.contains("comment")) 
column.metadata.getString("comment") else ""
+      append(buffer, column.name, column.dataType.simpleString, comment)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/671b382a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index d55ddb2..aa5b5e6 100644
--- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -365,7 +365,7 @@ class HiveDDLSuite
     }
   }
 
-  test("desc table") {
+  test("desc table for Hive table") {
     withTable("tab1") {
       val tabName = "tab1"
       sql(s"CREATE TABLE $tabName(c1 int)")
@@ -503,4 +503,21 @@ class HiveDDLSuite
     }.getMessage
     assert(message.contains("Can not drop default database"))
   }
+
+  test("desc table for data source table") {
+    withTable("tab1") {
+      val tabName = "tab1"
+      sqlContext.range(1).write.format("json").saveAsTable(tabName)
+
+      assert(sql(s"DESC $tabName").collect().length == 1)
+
+      assert(
+        sql(s"DESC FORMATTED $tabName").collect()
+          .exists(_.getString(0) == "# Storage Information"))
+
+      assert(
+        sql(s"DESC EXTENDED $tabName").collect()
+          .exists(_.getString(0) == "# Detailed Table Information"))
+    }
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to