This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 6c0e13b  [SPARK-27285] Support describing output of CTE
6c0e13b is described below

commit 6c0e13b456e731f2967c1d820803a33d44b35e31
Author: Dilip Biswal <dbis...@us.ibm.com>
AuthorDate: Tue Mar 26 16:00:56 2019 -0700

    [SPARK-27285] Support describing output of CTE
    
    ## What changes were proposed in this pull request?
    SPARK-26982 allows users to describe the output of a query. However, it did
    not support CTEs, because the grammar used a single rule to parse both selects
    and inserts. After SPARK-27209, which split select and insert parsing into two
    separate rules, we can now easily support describing the output of CTEs.
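    
    For example, a statement like the following now works (an illustrative query,
    not one of the added tests; the inline CTE needs no pre-existing table):
    
        DESCRIBE QUERY WITH cte AS (SELECT 1 AS id, 'a' AS name) SELECT * FROM cte
    
    which returns one row per output column of the CTE query (id int, name string)
    instead of a ParseException.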
    
    ## How was this patch tested?
    Existing tests were modified.
    
    Closes #24224 from dilipbiswal/describe_support_cte.
    
    Authored-by: Dilip Biswal <dbis...@us.ibm.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../org/apache/spark/sql/catalyst/parser/SqlBase.g4 |  6 +-----
 .../spark/sql/catalyst/parser/AstBuilder.scala      |  4 ----
 .../apache/spark/sql/execution/SparkSqlParser.scala |  2 +-
 .../apache/spark/sql/execution/command/tables.scala |  3 +--
 .../sql-tests/results/describe-query.sql.out        | 21 +++++----------------
 5 files changed, 8 insertions(+), 28 deletions(-)

diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 78dc60c..0f9387b 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -172,7 +172,7 @@ statement
     | (DESC | DESCRIBE) database EXTENDED? identifier                  #describeDatabase
     | (DESC | DESCRIBE) TABLE? option=(EXTENDED | FORMATTED)?
         tableIdentifier partitionSpec? describeColName?                #describeTable
-    | (DESC | DESCRIBE) QUERY? queryToDesc                             #describeQuery
+    | (DESC | DESCRIBE) QUERY? query                                   #describeQuery
     | REFRESH TABLE tableIdentifier                                    #refreshTable
     | REFRESH (STRING | .*?)                                           #refreshResource
     | CACHE LAZY? TABLE tableIdentifier
@@ -262,10 +262,6 @@ query
     : ctes? queryNoWith
     ;
 
-queryToDesc
-    : queryTerm queryOrganization
-    ;
-
 insertInto
     : INSERT OVERWRITE TABLE tableIdentifier (partitionSpec (IF NOT EXISTS)?)?                          #insertOverwriteTable
     | INSERT INTO TABLE? tableIdentifier partitionSpec?                                                 #insertIntoTable
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 7164ad2..111815b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -125,10 +125,6 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
     With(plan, ctes)
   }
 
-  override def visitQueryToDesc(ctx: QueryToDescContext): LogicalPlan = withOrigin(ctx) {
-    plan(ctx.queryTerm).optionalMap(ctx.queryOrganization)(withQueryResultClauses)
-  }
-
   override def visitQueryWithFrom(ctx: QueryWithFromContext): LogicalPlan = withOrigin(ctx) {
     val from = visitFromClause(ctx.fromClause)
     validate(ctx.selectStatement.querySpecification.fromClause == null,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 8c7b2cb..2c20b53 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -373,7 +373,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
    * Create a [[DescribeQueryCommand]] logical command.
    */
   override def visitDescribeQuery(ctx: DescribeQueryContext): LogicalPlan = withOrigin(ctx) {
-    DescribeQueryCommand(visitQueryToDesc(ctx.queryToDesc()))
+    DescribeQueryCommand(visitQuery(ctx.query))
   }
 
   /**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 08d6dc6..fb619a7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -635,8 +635,7 @@ case class DescribeTableCommand(
  * 3. VALUES statement.
  * 4. TABLE statement. Example : TABLE table_name
  * 5. statements of the form 'FROM table SELECT *'
- *
- * TODO : support CTEs.
+ * 6. Common table expressions (CTEs)
  */
 case class DescribeQueryCommand(query: LogicalPlan)
   extends DescribeCommandBase {
diff --git a/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out b/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out
index 9205b70..fc51b46 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out
@@ -82,29 +82,18 @@ val                         string
 -- !query 9
 DESC WITH s AS (SELECT 'hello' as col1) SELECT * FROM s
 -- !query 9 schema
-struct<>
+struct<col_name:string,data_type:string,comment:string>
 -- !query 9 output
-org.apache.spark.sql.catalyst.parser.ParseException
-
-mismatched input 'AS' expecting {<EOF>, '.'}(line 1, pos 12)
-
-== SQL ==
-DESC WITH s AS (SELECT 'hello' as col1) SELECT * FROM s
-------------^^^
+col1                   string
 
 
 -- !query 10
 DESCRIBE QUERY WITH s AS (SELECT * from desc_temp1) SELECT * FROM s
 -- !query 10 schema
-struct<>
+struct<col_name:string,data_type:string,comment:string>
 -- !query 10 output
-org.apache.spark.sql.catalyst.parser.ParseException
-
-mismatched input 's' expecting {<EOF>, '.'}(line 1, pos 20)
-
-== SQL ==
-DESCRIBE QUERY WITH s AS (SELECT * from desc_temp1) SELECT * FROM s
---------------------^^^
+key                    int                     column_comment      
+val                    string
 
 
 -- !query 11


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org