kevinyu98 commented on a change in pull request #27130: 
[SPARK-25993][SQL][TESTS] Add test cases for CREATE EXTERNAL TABLE with 
subdirectories
URL: https://github.com/apache/spark/pull/27130#discussion_r364994378
 
 

 ##########
 File path: 
sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSourceSuite.scala
 ##########
 @@ -222,4 +223,115 @@ class HiveParquetSourceSuite extends 
ParquetPartitioningTest {
       assert(df4.columns === Array("str", "max_int"))
     }
   }
+
+  test("SPARK-25993 CREATE EXTERNAL TABLE with subdirectories") {
+    Seq("true", "false").foreach { parquetConversion =>
+      withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> 
parquetConversion) {
+        withTempPath { path =>
+          withTable("tbl1", "tbl2", "tbl3", "tbl4", "tbl5", "tbl6") {
+            val someDF1 = Seq((1, 1, "parq1"), (2, 2, "parq2")).
+              toDF("c1", "c2", "c3").repartition(1)
+            val someDF2 = Seq((3, 3, "parq3"), (4, 4, "parq4")).
+              toDF("c1", "c2", "c3").repartition(1)
+            val someDF3 = Seq((5, 5, "parq5"), (6, 6, "parq6")).
+              toDF("c1", "c2", "c3").repartition(1)
+            someDF1.write.parquet(s"${path.getCanonicalPath}/l1/")
+            someDF2.write.parquet(s"${path.getCanonicalPath}/l1/l2/")
+            someDF3.write.parquet(s"${path.getCanonicalPath}/l1/l2/l3/")
+
+            val topDirStatement =
+              s"""
+                 |CREATE EXTERNAL TABLE tbl1(
+                 |  c1 int,
+                 |  c2 int,
+                 |  c3 string)
+                 |STORED AS parquet
+                 |LOCATION '${s"${path.getCanonicalPath}"}'""".stripMargin
+            sql(topDirStatement)
+            if (parquetConversion == "true") {
+              checkAnswer(sql("select * from tbl1"), Nil)
+            } else {
+              intercept[IOException](sql("select * from tbl1").show())
+            }
+
+            val l1DirStatement =
+              s"""
+                 |CREATE EXTERNAL TABLE tbl2(
+                 |  c1 int,
+                 |  c2 int,
+                 |  c3 string)
+                 |STORED AS parquet
+                 |LOCATION '${s"${path.getCanonicalPath}/l1/"}'""".stripMargin
+            sql(l1DirStatement)
+            if (parquetConversion == "true") {
+              checkAnswer(sql("select * from tbl2"),
+                (1 to 2).map(i => Row(i, i, s"parq$i")))
+            } else {
+              intercept[IOException](sql("select * from tbl2").show())
 
 Review comment:
   The full exception message is
   `Not a file:
file:/Users/qianyangyu/IdeaProjects/spark/target/tmp/spark-abc8c1ad-4a3a-420f-b4fc-58d995be9bb0/l1`,
 so I will check only its first part, `Not a file:`.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to