cloud-fan commented on a change in pull request #28833:
URL: https://github.com/apache/spark/pull/28833#discussion_r448115666



##########
File path: 
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
##########
@@ -2309,6 +2309,108 @@ class HiveDDLSuite
     }
   }
 
+  test("SPARK-20680: Spark-sql do not support for void column datatype") {
+    withTable("t") {
+      withView("tabVoidType") {
+        val client =
+          spark.sharedState.externalCatalog.unwrapped.asInstanceOf[HiveExternalCatalog].client
+        client.runSqlHive("CREATE TABLE t (t1 int)")
+        client.runSqlHive("INSERT INTO t VALUES (3)")
+        client.runSqlHive("CREATE VIEW tabVoidType AS SELECT NULL AS col FROM t")
+        checkAnswer(spark.table("tabVoidType"), Row(null))
+        // No exception shows
+        val desc = spark.sql("DESC tabVoidType").collect().toSeq
+        assert(desc.contains(Row("col", "null", null)))
+      }
+    }
+
+    // Forbid CTAS with null type
+    withTable("t1", "t2", "t3") {
+      val e1 = intercept[AnalysisException] {
+        spark.sql("CREATE TABLE t1 USING PARQUET AS SELECT null as null_col")
+      }.getMessage
+      assert(e1.contains("Cannot create tables with VOID type"))
+
+      val e2 = intercept[AnalysisException] {
+        spark.sql("CREATE TABLE t2 AS SELECT null as null_col")
+      }.getMessage
+      assert(e2.contains("Cannot create tables with VOID type"))
+
+      val e3 = intercept[AnalysisException] {
+          spark.sql("CREATE TABLE t3 STORED AS PARQUET AS SELECT null as null_col")
+      }.getMessage
+      assert(e3.contains("Cannot create tables with VOID type"))
+    }
+
+    // Forbid creating table with void/null type in Spark
+    Seq("void", "null").foreach { colType =>
+      withTable("t1", "t2", "t3") {
+        val e1 = intercept[AnalysisException] {
+          spark.sql(s"CREATE TABLE t1 (v $colType) USING parquet")
+        }.getMessage
+        assert(e1.contains("Cannot create tables with VOID type"))
+        val e2 = intercept[AnalysisException] {
+          spark.sql(s"CREATE TABLE t2 (v $colType) USING hive")

Review comment:
       can we follow the CTAS test and use `STORED AS PARQUET`?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to