cloud-fan commented on a change in pull request #28004: [SPARK-31204][SQL] HiveResult compatibility for DatasourceV2 command
URL: https://github.com/apache/spark/pull/28004#discussion_r399016671
########## File path: sql/core/src/test/scala/org/apache/spark/sql/execution/HiveResultSuite.scala ##########

@@ -68,4 +69,35 @@ class HiveResultSuite extends SharedSparkSession {
     val result = HiveResult.hiveResultString(executedPlan)
     assert(result.head === "0.00000000")
   }
+
+  test("SHOW TABLES in hive result") {
+    withSQLConf("spark.sql.catalog.testcat" -> classOf[InMemoryTableCatalog].getName) {
+      Seq(("testcat.ns", "tbl", "foo"), ("spark_catalog.default", "tbl", "csv")).foreach {
+        case (ns, tbl, source) =>
+          withTable(s"$ns.$tbl") {
+            spark.sql(s"CREATE TABLE $ns.$tbl (id bigint) USING $source")
+            val df = spark.sql(s"SHOW TABLES FROM $ns")
+            val executedPlan = df.queryExecution.executedPlan
+            assert(HiveResult.hiveResultString(executedPlan).head == tbl)
+          }
+      }
+    }
+  }
+
+  test("DESCRIBE TABLE in hive result") {
+    withSQLConf("spark.sql.catalog.testcat" -> classOf[InMemoryTableCatalog].getName) {
+      Seq(("testcat.ns", "tbl", "foo"), ("spark_catalog.default", "tbl", "csv")).foreach {
+        case (ns, tbl, source) =>
+          withTable(s"$ns.$tbl") {
+            spark.sql(s"CREATE TABLE $ns.$tbl (id bigint COMMENT 'col1') USING $source")
+            val df = spark.sql(s"DESCRIBE $ns.$tbl")
+            val executedPlan = df.queryExecution.executedPlan
+            val expected = "id                  " +
+              "\tbigint              " +
+              "\tcol1                "
+            assert(HiveResult.hiveResultString(executedPlan).head == expected)

Review comment:
   OK, so the number of spaces is also defined by Hive.
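To make the review comment concrete: HiveResult reproduces Hive's DESCRIBE layout by left-justifying each output field (column name, data type, comment) to a fixed 20-character width and joining the fields with tabs, which is where the runs of trailing spaces in the test's expected string come from. Below is a minimal, self-contained Scala sketch of that padding rule; the object and helper names (DescribePaddingSketch, formatDescribeRow) are illustrative only, not part of Spark's API.

    // Sketch of Hive-compatible DESCRIBE row formatting, assuming the
    // fixed 20-character field width HiveResult uses. The names here
    // are hypothetical and exist only for this illustration.
    object DescribePaddingSketch {
      def formatDescribeRow(name: String, dataType: String, comment: String): String =
        // Left-justify each field to 20 characters, then join with tabs.
        Seq(name, dataType, comment)
          .map(s => String.format("%-20s", s))
          .mkString("\t")

      def main(args: Array[String]): Unit = {
        // Prints the same string the DESCRIBE test above asserts:
        // "id" padded to 20 chars, tab, "bigint" padded to 20, tab, "col1" padded to 20.
        println(formatDescribeRow("id", "bigint", "col1"))
      }
    }

Padding to a fixed width rather than aligning to the longest value in each column keeps the output directly comparable against Hive's, which is presumably why the test hard-codes the exact number of spaces.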