Repository: spark
Updated Branches:
  refs/heads/master 89516c1c4 -> 6a05eb24d


[SPARK-17328][SQL] Fix NPE with EXPLAIN DESCRIBE TABLE

## What changes were proposed in this pull request?

This PR fixes the following NPE scenario in two ways.

**Reported Error Scenario**
```scala
scala> sql("EXPLAIN DESCRIBE TABLE x").show(truncate = false)
INFO SparkSqlParser: Parsing command: EXPLAIN DESCRIBE TABLE x
java.lang.NullPointerException
```

- **DESCRIBE**: Extend the `DESCRIBE` syntax to accept an optional `TABLE` keyword.
- **EXPLAIN**: Prevent the NPE when the target statement fails to parse, e.g., `EXPLAIN DESCRIBE TABLES x` (see the sketch below).
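
For reference, a minimal sketch of the intended post-fix behavior (not part of the commit itself), assuming a hypothetical local `SparkSession` and a temporary view standing in for the table `x`:

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical local session and temp view, only for this demo.
val spark = SparkSession.builder().master("local[*]").appName("SPARK-17328-demo").getOrCreate()
spark.range(1).toDF("a").createOrReplaceTempView("x")

// Previously threw a NullPointerException at parse time; with the extended
// grammar, DESCRIBE TABLE is accepted and EXPLAIN shows its plan.
spark.sql("EXPLAIN DESCRIBE TABLE x").show(truncate = false)

// A target statement that still fails to parse (note `TABLES`) now surfaces a
// ParseException ("Unsupported SQL statement", per the new test case) instead of an NPE.
spark.sql("EXPLAIN DESCRIBE TABLES x")
```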

## How was this patch tested?

Pass the Jenkins tests with a new test case.

Author: Dongjoon Hyun <dongj...@apache.org>

Closes #15357 from dongjoon-hyun/SPARK-17328.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6a05eb24
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6a05eb24
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6a05eb24

Branch: refs/heads/master
Commit: 6a05eb24d043aa93390f353850d56efa6124e063
Parents: 89516c1
Author: Dongjoon Hyun <dongj...@apache.org>
Authored: Wed Oct 5 10:52:43 2016 -0700
Committer: Herman van Hovell <hvanhov...@databricks.com>
Committed: Wed Oct 5 10:52:43 2016 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/parser/SqlBase.g4 |  2 +-
 .../spark/sql/execution/SparkSqlParser.scala    |  4 +-
 .../resources/sql-tests/inputs/describe.sql     |  4 ++
 .../sql-tests/results/describe.sql.out          | 58 +++++++++++++++-----
 .../sql/execution/SparkSqlParserSuite.scala     | 18 +++++-
 5 files changed, 68 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/6a05eb24/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 87719d9..6a94def 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -136,7 +136,7 @@ statement
     | SHOW CREATE TABLE tableIdentifier                                #showCreateTable
     | (DESC | DESCRIBE) FUNCTION EXTENDED? describeFuncName            #describeFunction
     | (DESC | DESCRIBE) DATABASE EXTENDED? identifier                  #describeDatabase
-    | (DESC | DESCRIBE) option=(EXTENDED | FORMATTED)?
+    | (DESC | DESCRIBE) TABLE? option=(EXTENDED | FORMATTED)?
         tableIdentifier partitionSpec? describeColName?                #describeTable
     | REFRESH TABLE tableIdentifier                                    #refreshTable
     | REFRESH .*?                                                      #refreshResource

http://git-wip-us.apache.org/repos/asf/spark/blob/6a05eb24/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 7f1e23e..085bb9f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -265,7 +265,9 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
     }
 
     val statement = plan(ctx.statement)
-    if (isExplainableStatement(statement)) {
+    if (statement == null) {
+      null  // This is enough since ParseException will raise later.
+    } else if (isExplainableStatement(statement)) {
       ExplainCommand(statement, extended = ctx.EXTENDED != null, codegen = ctx.CODEGEN != null)
     } else {
       ExplainCommand(OneRowRelation)

http://git-wip-us.apache.org/repos/asf/spark/blob/6a05eb24/sql/core/src/test/resources/sql-tests/inputs/describe.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/describe.sql b/sql/core/src/test/resources/sql-tests/inputs/describe.sql
index 3f0ae90..84503d0 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/describe.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/describe.sql
@@ -2,8 +2,12 @@ CREATE TABLE t (a STRING, b INT) PARTITIONED BY (c STRING, d STRING);
 
 ALTER TABLE t ADD PARTITION (c='Us', d=1);
 
+DESCRIBE t;
+
 DESC t;
 
+DESC TABLE t;
+
 -- Ignore these because there exist timestamp results, e.g., `Create Table`.
 -- DESC EXTENDED t;
 -- DESC FORMATTED t;

http://git-wip-us.apache.org/repos/asf/spark/blob/6a05eb24/sql/core/src/test/resources/sql-tests/results/describe.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
index 37bf303..b448d60 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 8
+-- Number of queries: 10
 
 
 -- !query 0
@@ -19,7 +19,7 @@ struct<>
 
 
 -- !query 2
-DESC t
+DESCRIBE t
 -- !query 2 schema
 struct<col_name:string,data_type:string,comment:string>
 -- !query 2 output
@@ -34,7 +34,7 @@ d                     string
 
 
 -- !query 3
-DESC t PARTITION (c='Us', d=1)
+DESC t
 -- !query 3 schema
 struct<col_name:string,data_type:string,comment:string>
 -- !query 3 output
@@ -49,30 +49,60 @@ d                           string
 
 
 -- !query 4
-DESC t PARTITION (c='Us', d=2)
+DESC TABLE t
 -- !query 4 schema
-struct<>
+struct<col_name:string,data_type:string,comment:string>
 -- !query 4 output
+# Partition Information                                                    
+# col_name             data_type               comment             
+a                      string                                      
+b                      int                                         
+c                      string                                      
+c                      string                                      
+d                      string                                      
+d                      string
+
+
+-- !query 5
+DESC t PARTITION (c='Us', d=1)
+-- !query 5 schema
+struct<col_name:string,data_type:string,comment:string>
+-- !query 5 output
+# Partition Information                                                    
+# col_name             data_type               comment             
+a                      string                                      
+b                      int                                         
+c                      string                                      
+c                      string                                      
+d                      string                                      
+d                      string
+
+
+-- !query 6
+DESC t PARTITION (c='Us', d=2)
+-- !query 6 schema
+struct<>
+-- !query 6 output
 org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException
 Partition not found in table 't' database 'default':
 c -> Us
 d -> 2;
 
 
--- !query 5
+-- !query 7
 DESC t PARTITION (c='Us')
--- !query 5 schema
+-- !query 7 schema
 struct<>
--- !query 5 output
+-- !query 7 output
 org.apache.spark.sql.AnalysisException
Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`default`.`t`';
 
 
--- !query 6
+-- !query 8
 DESC t PARTITION (c='Us', d)
--- !query 6 schema
+-- !query 8 schema
 struct<>
--- !query 6 output
+-- !query 8 output
 org.apache.spark.sql.catalyst.parser.ParseException
 
 PARTITION specification is incomplete: `d`(line 1, pos 0)
@@ -82,9 +112,9 @@ DESC t PARTITION (c='Us', d)
 ^^^
 
 
--- !query 7
+-- !query 9
 DROP TABLE t
--- !query 7 schema
+-- !query 9 schema
 struct<>
--- !query 7 output
+-- !query 9 output
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6a05eb24/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
index 8161c08..6712d32 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark.sql.execution
 
-import org.apache.spark.sql.catalyst.FunctionIdentifier
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.command.{DescribeFunctionCommand, ShowFunctionsCommand}
+import org.apache.spark.sql.execution.command.{DescribeFunctionCommand, DescribeTableCommand,
+  ShowFunctionsCommand}
 import org.apache.spark.sql.internal.SQLConf
 
 /**
@@ -72,4 +73,17 @@ class SparkSqlParserSuite extends PlanTest {
       DescribeFunctionCommand(FunctionIdentifier("bar", database = Option("f")), isExtended = true))
   }
 
+  test("SPARK-17328 Fix NPE with EXPLAIN DESCRIBE TABLE") {
+    assertEqual("describe table t",
+      DescribeTableCommand(
+        TableIdentifier("t"), Map.empty, isExtended = false, isFormatted = 
false))
+    assertEqual("describe table extended t",
+      DescribeTableCommand(
+        TableIdentifier("t"), Map.empty, isExtended = true, isFormatted = 
false))
+    assertEqual("describe table formatted t",
+      DescribeTableCommand(
+        TableIdentifier("t"), Map.empty, isExtended = false, isFormatted = 
true))
+
+    intercept("explain describe tables x", "Unsupported SQL statement")
+  }
 }

