amaliujia commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r889237990


##########
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##########
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with AnalysisTest {
     }.getMessage
     assert(errMsg.contains("my_temp_table is a temp view. 'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+    spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryCatalog].getName)
+    val catalogName = "testcat"
+    val dbName = "my_db"
+    val tableName = "my_table"
+    val tableSchema = new StructType().add("i", "int")
+    val description = "this is a test table"
+
+    val df = spark.catalog.createTable(
+      tableName = Array(catalogName, dbName, tableName).mkString("."),
+      source = classOf[FakeV2Provider].getName,
+      schema = tableSchema,
+      description = description,
+      options = Map.empty[String, String])
+    assert(df.schema.equals(tableSchema))
+
+    val testCatalog =
+      spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+    val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+    assert(table.schema().equals(tableSchema))
+    assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+    assert(table.properties().get("comment").equals(description))
+  }
+
+  test("three layer namespace compatibility - create external table") {
+    withTempDir { dir =>
+      val catalogName = "testcat"
+      val dbName = "my_db"
+      val tableName = "my_table"
+      val tableSchema = new StructType().add("i", "int")
+      val description = "this is a test table"
+
+      val df = spark.catalog.createTable(
+        tableName = Array(catalogName, dbName, tableName).mkString("."),
+        source = classOf[FakeV2Provider].getName,
+        schema = tableSchema,
+        description = description,
+        options = Map("path" -> dir.getAbsolutePath))
+      assert(df.schema.equals(tableSchema))
+
+      val testCatalog =
+        spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+      assert(table.schema().equals(tableSchema))
+      assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+      assert(table.properties().get("comment").equals(description))
+      assert(table.properties().get("path").equals(dir.getAbsolutePath))
+    }
+  }
+
+  test("three layer namespace compatibility - list tables") {

Review Comment:
   It turns out our code does not support `a.b` as a table/database name:
   
   ```
   `also_name.my_db` is not a valid name for tables/databases. Valid names only contain alphabet characters, numbers and _.
   org.apache.spark.sql.AnalysisException: `also_name.my_db` is not a valid name for tables/databases. Valid names only contain alphabet characters, numbers and _.
        at org.apache.spark.sql.errors.QueryCompilationErrors$.invalidNameForTableOrDatabaseError(QueryCompilationErrors.scala:572)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.validateName(SessionCatalog.scala:146)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createTable(SessionCatalog.scala:350)
        at org.apache.spark.sql.internal.CatalogSuite.createTable(CatalogSuite.scala:63)
        at org.apache.spark.sql.internal.CatalogSuite.$anonfun$new$101(CatalogSuite.scala:677)
   ```
   
   In that case, we won't hit the ambiguity where `a.b` passed to the `ListTables` API could mean either a single database named `a.b` or catalog `a` with database `b`, right?
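
   For illustration, a minimal sketch of the two behaviors (the local session setup and the catalog name `a` are my own assumptions, not part of this PR):

   ```
   import org.apache.spark.sql.AnalysisException
   import org.apache.spark.sql.SparkSession

   val spark = SparkSession.builder().master("local[1]").getOrCreate()

   // Rejected by SessionCatalog.validateName: only letters, digits and _
   // are allowed, so `a.b` can never name a database in the session catalog.
   try {
     spark.sql("CREATE DATABASE `a.b`")
   } catch {
     case e: AnalysisException => println(e.getMessage)
   }

   // With a V2 catalog registered under `a`, a dotted argument to
   // listTables could then only mean catalog `a` plus database `b`
   // (assuming the three layer namespace support this PR adds):
   // spark.catalog.listTables("a.b")
   ```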
   



