amaliujia commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891537461


##########
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##########
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with AnalysisTest {
     }.getMessage
     assert(errMsg.contains("my_temp_table is a temp view. 'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+    spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryCatalog].getName)
+    val catalogName = "testcat"
+    val dbName = "my_db"
+    val tableName = "my_table"
+    val tableSchema = new StructType().add("i", "int")
+    val description = "this is a test table"
+
+    val df = spark.catalog.createTable(
+      tableName = Array(catalogName, dbName, tableName).mkString("."),
+      source = classOf[FakeV2Provider].getName,
+      schema = tableSchema,
+      description = description,
+      options = Map.empty[String, String])
+    assert(df.schema.equals(tableSchema))
+
+    val testCatalog =
+      spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+    val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+    assert(table.schema().equals(tableSchema))
+    assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+    assert(table.properties().get("comment").equals(description))
+  }
+
+  test("three layer namespace compatibility - create external table") {
+    withTempDir { dir =>
+      val catalogName = "testcat"
+      val dbName = "my_db"
+      val tableName = "my_table"
+      val tableSchema = new StructType().add("i", "int")
+      val description = "this is a test table"
+
+      val df = spark.catalog.createTable(
+        tableName = Array(catalogName, dbName, tableName).mkString("."),
+        source = classOf[FakeV2Provider].getName,
+        schema = tableSchema,
+        description = description,
+        options = Map("path" -> dir.getAbsolutePath))
+      assert(df.schema.equals(tableSchema))
+
+      val testCatalog =
+        spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+      assert(table.schema().equals(tableSchema))
+      assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+      assert(table.properties().get("comment").equals(description))
+      assert(table.properties().get("path").equals(dir.getAbsolutePath))
+    }
+  }
+
+  test("three layer namespace compatibility - list tables") {

Review Comment:
   Actually we still need the change in `listTables`:
   
   Existing users do not use 3-part names; they pass 2-part names. In the case of `listTables`, existing users pass the database name directly (so just `b`, not `a.b`). A bare database name is ambiguous as to which catalog it belongs to, so a choice has to be made, and the default catalog is the Hive metastore.
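   
   A minimal sketch of the two call shapes under discussion (the `testcat` and `my_db` identifiers are illustrative, not taken from this PR):
   
   ```scala
   // Assumes an active SparkSession `spark`.
   
   // 2-part / bare database name: existing callers pass just the database,
   // so this must keep resolving against the default catalog (Hive metastore).
   spark.catalog.listTables("my_db")
   
   // 3-part name: the new path this PR adds routes through the named v2 catalog.
   spark.catalog.listTables("testcat.my_db")
   ```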


