Repository: spark
Updated Branches:
  refs/heads/branch-1.5 874b9d855 -> 251d1eef4


[SPARK-6212] [SQL] The EXPLAIN output of CTAS only shows the analyzed plan

JIRA: https://issues.apache.org/jira/browse/SPARK-6212

Author: Yijie Shen <henry.yijies...@gmail.com>

Closes #7986 from yjshen/ctas_explain and squashes the following commits:

bb6fee5 [Yijie Shen] refine test
f731041 [Yijie Shen] address comment
b2cf8ab [Yijie Shen] bug fix
bd7eb20 [Yijie Shen] ctas explain

(cherry picked from commit 3ca995b78f373251081f6877623649bfba3040b2)
Signed-off-by: Yin Huai <yh...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/251d1eef
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/251d1eef
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/251d1eef

Branch: refs/heads/branch-1.5
Commit: 251d1eef4848e7c1bd0fff826dfbde48c6de1145
Parents: 874b9d8
Author: Yijie Shen <henry.yijies...@gmail.com>
Authored: Sat Aug 8 21:05:50 2015 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Sat Aug 8 21:06:26 2015 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/execution/commands.scala   |  2 ++
 .../hive/execution/CreateTableAsSelect.scala    |  4 ++-
 .../sql/hive/execution/HiveExplainSuite.scala   | 35 ++++++++++++++++++--
 3 files changed, 38 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/251d1eef/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index 6b83025..95209e6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -69,6 +69,8 @@ private[sql] case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan
     val converted = sideEffectResult.map(convert(_).asInstanceOf[InternalRow])
     sqlContext.sparkContext.parallelize(converted, 1)
   }
+
+  override def argString: String = cmd.toString
 }
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/251d1eef/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
index 84358cb..8422287 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
@@ -40,6 +40,8 @@ case class CreateTableAsSelect(
   def database: String = tableDesc.database
   def tableName: String = tableDesc.name
 
+  override def children: Seq[LogicalPlan] = Seq(query)
+
   override def run(sqlContext: SQLContext): Seq[Row] = {
     val hiveContext = sqlContext.asInstanceOf[HiveContext]
     lazy val metastoreRelation: MetastoreRelation = {
@@ -91,6 +93,6 @@ case class CreateTableAsSelect(
   }
 
   override def argString: String = {
-    s"[Database:$database, TableName: $tableName, InsertIntoHiveTable]\n" + 
query.toString
+    s"[Database:$database, TableName: $tableName, InsertIntoHiveTable]"
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/251d1eef/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
index 8215dd6..44c5b80 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
@@ -17,13 +17,18 @@
 
 package org.apache.spark.sql.hive.execution
 
-import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.{SQLContext, QueryTest}
+import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._
+import org.apache.spark.sql.test.SQLTestUtils
 
 /**
  * A set of tests that validates support for Hive Explain command.
  */
-class HiveExplainSuite extends QueryTest {
+class HiveExplainSuite extends QueryTest with SQLTestUtils {
+
+  def sqlContext: SQLContext = TestHive
+
   test("explain extended command") {
     checkExistence(sql(" explain   select * from src where key=123 "), true,
                    "== Physical Plan ==")
@@ -74,4 +79,30 @@ class HiveExplainSuite extends QueryTest {
       "Limit",
       "src")
   }
+
+  test("SPARK-6212: The EXPLAIN output of CTAS only shows the analyzed plan") {
+    withTempTable("jt") {
+      val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""))
+      read.json(rdd).registerTempTable("jt")
+      val outputs = sql(
+        s"""
+           |EXPLAIN EXTENDED
+           |CREATE TABLE t1
+           |AS
+           |SELECT * FROM jt
+      """.stripMargin).collect().map(_.mkString).mkString
+
+      val shouldContain =
+        "== Parsed Logical Plan ==" :: "== Analyzed Logical Plan ==" :: 
"Subquery" ::
+        "== Optimized Logical Plan ==" :: "== Physical Plan ==" ::
+        "CreateTableAsSelect" :: "InsertIntoHiveTable" :: "jt" :: Nil
+      for (key <- shouldContain) {
+        assert(outputs.contains(key), s"$key doesn't exist in result")
+      }
+
+      val physicalIndex = outputs.indexOf("== Physical Plan ==")
+      assert(!outputs.substring(physicalIndex).contains("Subquery"),
+        "Physical Plan should not contain Subquery since it's eliminated by 
optimizer")
+    }
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to