[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-08 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r893029290


##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala:
##
@@ -965,6 +965,10 @@ class SessionCatalog(
 isTempView(nameParts.asTableIdentifier)
   }
 
+  def isGlobalTempViewDB(dbName: String): Boolean = {
+globalTempViewManager.database.equals(dbName)

Review Comment:
   this should use `equalsIgnoreCase`; we can fix it in a follow-up



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891881553


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -553,4 +571,103 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 }.getMessage
 assert(errMsg.contains("my_temp_table is a temp view. 
'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+val catalogName = "testcat"
+val dbName = "my_db"
+val tableName = "my_table"
+val tableSchema = new StructType().add("i", "int")
+val description = "this is a test table"
+
+val df = spark.catalog.createTable(
+  tableName = Array(catalogName, dbName, tableName).mkString("."),
+  source = classOf[FakeV2Provider].getName,
+  schema = tableSchema,
+  description = description,
+  options = Map.empty[String, String])
+assert(df.schema.equals(tableSchema))
+
+val testCatalog =
+  spark.sessionState.catalogManager.catalog(catalogName).asTableCatalog
+val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+assert(table.schema().equals(tableSchema))
+
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+assert(table.properties().get("comment").equals(description))
+  }
+
+  test("three layer namespace compatibility - create external table") {
+withTempDir { dir =>
+  val catalogName = "testcat"
+  val dbName = "my_db"
+  val tableName = "my_table"
+  val tableSchema = new StructType().add("i", "int")
+  val description = "this is a test table"
+
+  val df = spark.catalog.createTable(
+tableName = Array(catalogName, dbName, tableName).mkString("."),
+source = classOf[FakeV2Provider].getName,
+schema = tableSchema,
+description = description,
+options = Map("path" -> dir.getAbsolutePath))
+  assert(df.schema.equals(tableSchema))
+
+  val testCatalog =
+spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+  val table = testCatalog.loadTable(Identifier.of(Array(dbName), 
tableName))
+  assert(table.schema().equals(tableSchema))
+  
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+  assert(table.properties().get("comment").equals(description))
+  assert(table.properties().get("path").equals(dir.getAbsolutePath))
+  assert(table.properties().get("external").equals("true"))
+  assert(table.properties().get("location").equals("file:" + 
dir.getAbsolutePath))
+}
+  }
+
+  test("three layer namespace compatibility - list tables") {
+withTempDir { dir =>
+  val catalogName = "testcat"
+  val dbName = "my_db"
+  val tableName = "my_table"
+  val tableSchema = new StructType().add("i", "int")
+  val description = "this is a test managed table"
+
+  spark.catalog.createTable(
+tableName = Array(catalogName, dbName, tableName).mkString("."),
+source = classOf[FakeV2Provider].getName,
+schema = tableSchema,
+description = description,
+options = Map.empty[String, String])
+
+  val tableName2 = "my_table2"
+  val description2 = "this is a test external table"
+
+  spark.catalog.createTable(
+tableName = Array(catalogName, dbName, tableName2).mkString("."),
+source = classOf[FakeV2Provider].getName,
+schema = tableSchema,
+description = description2,
+options = Map("path" -> dir.getAbsolutePath))
+
+  val tables = spark.catalog.listTables("testcat.my_db").collect()
+  assert(tables.size == 2)
+
+  val expectedTable1 =
+new Table(tableName, catalogName, Array(dbName), description,
+  CatalogTableType.MANAGED.name, false)
+  assert(tables.exists(t =>
+expectedTable1.name.equals(t.name) && 
expectedTable1.database.equals(t.database) &&
+expectedTable1.description.equals(t.description) &&
+expectedTable1.tableType.equals(t.tableType) &&
+expectedTable1.isTemporary == t.isTemporary))
+
+  val expectedTable2 =
+new Table(tableName2, catalogName, Array(dbName), description2,
+  CatalogTableType.EXTERNAL.name, false)
+  assert(tables.exists(t =>
+expectedTable2.name.equals(t.name) && 
expectedTable2.database.equals(t.database) &&
+expectedTable2.description.equals(t.description) &&
+expectedTable2.tableType.equals(t.tableType) &&
+expectedTable2.isTemporary == t.isTemporary))
+}

Review Comment:
   can we test the backward compatibility case? e.g. register a new catalog 
"default" in this test case, and `listTable("default")` should still list 
tables under "default" schema in HMS.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891880524


##
sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala:
##
@@ -165,7 +165,8 @@ class GlobalTempViewSuite extends QueryTest with 
SharedSparkSession {
   assert(spark.catalog.tableExists(globalTempDB, "src"))
   assert(spark.catalog.getTable(globalTempDB, "src").toString == new Table(
 name = "src",
-database = globalTempDB,
+catalog = "spark_catalog",

Review Comment:
   ```suggestion
   catalog = CatalogManager.SESSION_CATALOG_NAME,
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891879819


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -117,14 +131,45 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   case NonFatal(_) => None
 }
 val isTemp = sessionCatalog.isTempView(tableIdent)
+val qualifier =
+  
Array(metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull)

Review Comment:
   
`metadata.map(_.identifier.database).getOrElse(tableIdent.database).map(Array(_)).orNull`.
 In the abnormal case we should return null, not Array(null)



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891879819


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -117,14 +131,45 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   case NonFatal(_) => None
 }
 val isTemp = sessionCatalog.isTempView(tableIdent)
+val qualifier =
+  
Array(metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull)

Review Comment:
   
`metadata.map(_.identifier.database).getOrElse(tableIdent.database).toArray`. 
In the abnormal case we should return null, not Array(null)



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891879436


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -117,14 +131,45 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   case NonFatal(_) => None
 }
 val isTemp = sessionCatalog.isTempView(tableIdent)
+val qualifier =
+  
Array(metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull)
 new Table(
   name = tableIdent.table,
-  database = 
metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull,
+  catalog = "spark_catalog",

Review Comment:
   ```suggestion
 catalog = CatalogManager.SESSION_CATALOG_NAME,
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891879053


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -97,8 +98,21 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
*/
   @throws[AnalysisException]("database does not exist")
   override def listTables(dbName: String): Dataset[Table] = {
-val tables = sessionCatalog.listTables(dbName).map(makeTable)
-CatalogImpl.makeDataset(tables, sparkSession)
+// dbName could be either 2-part name or 3-part name. We assume it is 
2-part name

Review Comment:
   ```suggestion
   // `dbName` could be either a single database name (behavior in Spark 
3.3 and prior) or
   // a qualified namespace with catalog name. We assume it's a single 
database name and ...
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891878020


##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala:
##
@@ -965,6 +965,10 @@ class SessionCatalog(
 isTempView(nameParts.asTableIdentifier)
   }
 
+  def isGlobalTempView(dbName: String): Boolean = {

Review Comment:
   ```suggestion
 def isGlobalTempViewDB(dbName: String): Boolean = {
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891346881


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 }.getMessage
 assert(errMsg.contains("my_temp_table is a temp view. 
'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+spark.conf.set("spark.sql.catalog.testcat", 
classOf[InMemoryCatalog].getName)
+val catalogName = "testcat"
+val dbName = "my_db"
+val tableName = "my_table"
+val tableSchema = new StructType().add("i", "int")
+val description = "this is a test table"
+
+val df = spark.catalog.createTable(
+  tableName = Array(catalogName, dbName, tableName).mkString("."),
+  source = classOf[FakeV2Provider].getName,
+  schema = tableSchema,
+  description = description,
+  options = Map.empty[String, String])
+assert(df.schema.equals(tableSchema))
+
+val testCatalog =
+  spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+assert(table.schema().equals(tableSchema))
+
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+assert(table.properties().get("comment").equals(description))
+  }
+
+  test("three layer namespace compatibility - create external table") {
+withTempDir { dir =>
+  val catalogName = "testcat"
+  val dbName = "my_db"
+  val tableName = "my_table"
+  val tableSchema = new StructType().add("i", "int")
+  val description = "this is a test table"
+
+  val df = spark.catalog.createTable(
+tableName = Array(catalogName, dbName, tableName).mkString("."),
+source = classOf[FakeV2Provider].getName,
+schema = tableSchema,
+description = description,
+options = Map("path" -> dir.getAbsolutePath))
+  assert(df.schema.equals(tableSchema))
+
+  val testCatalog =
+spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+  val table = testCatalog.loadTable(Identifier.of(Array(dbName), 
tableName))
+  assert(table.schema().equals(tableSchema))
+  
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+  assert(table.properties().get("comment").equals(description))
+  assert(table.properties().get("path").equals(dir.getAbsolutePath))
+}
+  }
+
+  test("three layer namespace compatibility - list tables") {

Review Comment:
   ah, then we don't need the special handling in `listTables` at all!



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891345079


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -299,15 +314,19 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 val functionFields = 
ScalaReflection.getConstructorParameterValues(function)
 val columnFields = ScalaReflection.getConstructorParameterValues(column)
 assert(dbFields == Seq("nama", "descripta", "locata"))
-assert(tableFields == Seq("nama", "databasa", "descripta", "typa", false))
+assert(Seq(tableFields.apply(0), tableFields.apply(1), 
tableFields.apply(3),

Review Comment:
   ditto for many other places



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891344711


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -299,15 +314,19 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 val functionFields = 
ScalaReflection.getConstructorParameterValues(function)
 val columnFields = ScalaReflection.getConstructorParameterValues(column)
 assert(dbFields == Seq("nama", "descripta", "locata"))
-assert(tableFields == Seq("nama", "databasa", "descripta", "typa", false))
+assert(Seq(tableFields.apply(0), tableFields.apply(1), 
tableFields.apply(3),

Review Comment:
   ```suggestion
   assert(Seq(tableFields(0), tableFields(1), tableFields(3),
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891344191


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -290,7 +304,8 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 
   test("catalog classes format in Dataset.show") {
 val db = new Database("nama", "descripta", "locata")
-val table = new Table("nama", "databasa", "descripta", "typa", isTemporary 
= false)
+val table = new Table("nama", "cataloa", Array("databasa"), "descripta", 
"typa",

Review Comment:
   ```suggestion
   val table = new Table("nama", "catalog", Array("databasa"), "descripta", 
"typa",
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891341003


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -117,14 +128,44 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   case NonFatal(_) => None
 }
 val isTemp = sessionCatalog.isTempView(tableIdent)
+val qualifier =
+  
Array(metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull)
 new Table(
   name = tableIdent.table,

Review Comment:
   shall we set the `catalog` parameter to `spark_catalog` here?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891340370


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -117,14 +128,44 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   case NonFatal(_) => None
 }
 val isTemp = sessionCatalog.isTempView(tableIdent)
+val qualifier =

Review Comment:
   this variable is not used.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891339983


##
sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala:
##
@@ -55,7 +55,8 @@ class Database(
  * A table in Spark, as returned by the `listTables` method in [[Catalog]].
  *
  * @param name name of the table.
- * @param database name of the database the table belongs to.
+ * @param catalog name of the catalog that the table belongs to.
+ * @param namespace qualifier of the namespace that the table belongs to.

Review Comment:
   ```suggestion
* @param namespace the namespace that the table belongs to.
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891337356


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -97,8 +98,18 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
*/
   @throws[AnalysisException]("database does not exist")
   override def listTables(dbName: String): Dataset[Table] = {
-val tables = sessionCatalog.listTables(dbName).map(makeTable)
-CatalogImpl.makeDataset(tables, sparkSession)
+if (sessionCatalog.databaseExists(dbName)) {

Review Comment:
   let's add some comments to explain this backward compatible behavior



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891336456


##
sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala:
##
@@ -64,15 +65,34 @@ class Database(
 @Stable
 class Table(
 val name: String,
-@Nullable val database: String,
+@Nullable val catalog: String,
+@Nullable val namespace: Array[String],
 @Nullable val description: String,
 val tableType: String,
 val isTemporary: Boolean)
   extends DefinedByConstructorParams {
 
+  def this(name: String, database: String, description: String, tableType: 
String,
+isTemporary: Boolean) = {
+this(name, null, Array(database), description, tableType, isTemporary)
+  }
+
+  def database: String = {
+if (namespace == null) {
+  null
+} else if (namespace.length == 2) {

Review Comment:
   when can this happen?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891335856


##
sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala:
##
@@ -64,15 +65,34 @@ class Database(
 @Stable
 class Table(
 val name: String,
-@Nullable val database: String,
+@Nullable val catalog: String,
+@Nullable val namespace: Array[String],
 @Nullable val description: String,
 val tableType: String,
 val isTemporary: Boolean)
   extends DefinedByConstructorParams {
 
+  def this(name: String, database: String, description: String, tableType: 
String,
+isTemporary: Boolean) = {

Review Comment:
   style nit: one parameter per line



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-06-07 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r891334793


##
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala:
##
@@ -2185,6 +2185,11 @@ object QueryCompilationErrors extends QueryErrorsBase {
 new AnalysisException(s"Table or view '$tableName' not found in database 
'$dbName'")
   }
 
+  def tableOrViewNotFound(ident: Seq[String]): Throwable = {
+val fullName = ident.quoted
+new AnalysisException(s"Table or view '$fullName' not found")

Review Comment:
   ```suggestion
   new AnalysisException(s"Table or view '${ident.quoted}' not found")
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881182863


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 }.getMessage
 assert(errMsg.contains("my_temp_table is a temp view. 
'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+spark.conf.set("spark.sql.catalog.testcat", 
classOf[InMemoryCatalog].getName)
+val catalogName = "testcat"
+val dbName = "my_db"
+val tableName = "my_table"
+val tableSchema = new StructType().add("i", "int")
+val description = "this is a test table"
+
+val df = spark.catalog.createTable(
+  tableName = Array(catalogName, dbName, tableName).mkString("."),
+  source = classOf[FakeV2Provider].getName,
+  schema = tableSchema,
+  description = description,
+  options = Map.empty[String, String])
+assert(df.schema.equals(tableSchema))
+
+val testCatalog =
+  spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+assert(table.schema().equals(tableSchema))
+
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+assert(table.properties().get("comment").equals(description))
+  }
+
+  test("three layer namespace compatibility - create external table") {
+withTempDir { dir =>
+  val catalogName = "testcat"
+  val dbName = "my_db"
+  val tableName = "my_table"
+  val tableSchema = new StructType().add("i", "int")
+  val description = "this is a test table"
+
+  val df = spark.catalog.createTable(
+tableName = Array(catalogName, dbName, tableName).mkString("."),
+source = classOf[FakeV2Provider].getName,
+schema = tableSchema,
+description = description,
+options = Map("path" -> dir.getAbsolutePath))
+  assert(df.schema.equals(tableSchema))
+
+  val testCatalog =
+spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+  val table = testCatalog.loadTable(Identifier.of(Array(dbName), 
tableName))
+  assert(table.schema().equals(tableSchema))
+  
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+  assert(table.properties().get("comment").equals(description))
+  assert(table.properties().get("path").equals(dir.getAbsolutePath))
+}
+  }
+
+  test("three layer namespace compatibility - list tables") {

Review Comment:
   shall we check the backward compatibility behavior as well? e.g. a same-name 
schema exists in the hive metastore while the current catalog is not hive 
metastore.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881182863


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 }.getMessage
 assert(errMsg.contains("my_temp_table is a temp view. 
'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+spark.conf.set("spark.sql.catalog.testcat", 
classOf[InMemoryCatalog].getName)
+val catalogName = "testcat"
+val dbName = "my_db"
+val tableName = "my_table"
+val tableSchema = new StructType().add("i", "int")
+val description = "this is a test table"
+
+val df = spark.catalog.createTable(
+  tableName = Array(catalogName, dbName, tableName).mkString("."),
+  source = classOf[FakeV2Provider].getName,
+  schema = tableSchema,
+  description = description,
+  options = Map.empty[String, String])
+assert(df.schema.equals(tableSchema))
+
+val testCatalog =
+  spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+assert(table.schema().equals(tableSchema))
+
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+assert(table.properties().get("comment").equals(description))
+  }
+
+  test("three layer namespace compatibility - create external table") {
+withTempDir { dir =>
+  val catalogName = "testcat"
+  val dbName = "my_db"
+  val tableName = "my_table"
+  val tableSchema = new StructType().add("i", "int")
+  val description = "this is a test table"
+
+  val df = spark.catalog.createTable(
+tableName = Array(catalogName, dbName, tableName).mkString("."),
+source = classOf[FakeV2Provider].getName,
+schema = tableSchema,
+description = description,
+options = Map("path" -> dir.getAbsolutePath))
+  assert(df.schema.equals(tableSchema))
+
+  val testCatalog =
+spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+  val table = testCatalog.loadTable(Identifier.of(Array(dbName), 
tableName))
+  assert(table.schema().equals(tableSchema))
+  
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+  assert(table.properties().get("comment").equals(description))
+  assert(table.properties().get("path").equals(dir.getAbsolutePath))
+}
+  }
+
+  test("three layer namespace compatibility - list tables") {

Review Comment:
   shall we also check the backward-compatibility behavior? e.g. when a schema 
with the same name exists in the Hive metastore.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881182373


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 }.getMessage
 assert(errMsg.contains("my_temp_table is a temp view. 
'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+spark.conf.set("spark.sql.catalog.testcat", 
classOf[InMemoryCatalog].getName)
+val catalogName = "testcat"
+val dbName = "my_db"
+val tableName = "my_table"
+val tableSchema = new StructType().add("i", "int")
+val description = "this is a test table"
+
+val df = spark.catalog.createTable(
+  tableName = Array(catalogName, dbName, tableName).mkString("."),
+  source = classOf[FakeV2Provider].getName,
+  schema = tableSchema,
+  description = description,
+  options = Map.empty[String, String])
+assert(df.schema.equals(tableSchema))
+
+val testCatalog =
+  spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+val table = testCatalog.loadTable(Identifier.of(Array(dbName), tableName))
+assert(table.schema().equals(tableSchema))
+
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+assert(table.properties().get("comment").equals(description))
+  }
+
+  test("three layer namespace compatibility - create external table") {
+withTempDir { dir =>
+  val catalogName = "testcat"
+  val dbName = "my_db"
+  val tableName = "my_table"
+  val tableSchema = new StructType().add("i", "int")
+  val description = "this is a test table"
+
+  val df = spark.catalog.createTable(
+tableName = Array(catalogName, dbName, tableName).mkString("."),
+source = classOf[FakeV2Provider].getName,
+schema = tableSchema,
+description = description,
+options = Map("path" -> dir.getAbsolutePath))
+  assert(df.schema.equals(tableSchema))
+
+  val testCatalog =
+spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
+  val table = testCatalog.loadTable(Identifier.of(Array(dbName), 
tableName))
+  assert(table.schema().equals(tableSchema))
+  
assert(table.properties().get("provider").equals(classOf[FakeV2Provider].getName))
+  assert(table.properties().get("comment").equals(description))
+  assert(table.properties().get("path").equals(dir.getAbsolutePath))

Review Comment:
   let's check `location` property as well, and also `external` property.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881181890


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 }.getMessage
 assert(errMsg.contains("my_temp_table is a temp view. 
'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+spark.conf.set("spark.sql.catalog.testcat", 
classOf[InMemoryCatalog].getName)
+val catalogName = "testcat"
+val dbName = "my_db"
+val tableName = "my_table"
+val tableSchema = new StructType().add("i", "int")
+val description = "this is a test table"
+
+val df = spark.catalog.createTable(
+  tableName = Array(catalogName, dbName, tableName).mkString("."),
+  source = classOf[FakeV2Provider].getName,
+  schema = tableSchema,
+  description = description,
+  options = Map.empty[String, String])
+assert(df.schema.equals(tableSchema))
+
+val testCatalog =
+  spark.sessionState.catalogManager.catalog("testcat").asTableCatalog

Review Comment:
   ```suggestion
 spark.sessionState.catalogManager.catalog(catalogName).asTableCatalog
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881181600


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -553,4 +570,100 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 }.getMessage
 assert(errMsg.contains("my_temp_table is a temp view. 
'recoverPartitions()' expects a table"))
   }
+
+  test("three layer namespace compatibility - create managed table") {
+spark.conf.set("spark.sql.catalog.testcat", 
classOf[InMemoryCatalog].getName)

Review Comment:
   it's set in `before` already



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881180781


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -299,15 +313,18 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 val functionFields = 
ScalaReflection.getConstructorParameterValues(function)
 val columnFields = ScalaReflection.getConstructorParameterValues(column)
 assert(dbFields == Seq("nama", "descripta", "locata"))
-assert(tableFields == Seq("nama", "databasa", "descripta", "typa", false))
+assert(Seq(tableFields.apply(0), tableFields.apply(2), 
tableFields.apply(3),

Review Comment:
   ```suggestion
   assert(tableFields.take(4) == Seq(...))
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881180781


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -299,15 +313,18 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 val functionFields = 
ScalaReflection.getConstructorParameterValues(function)
 val columnFields = ScalaReflection.getConstructorParameterValues(column)
 assert(dbFields == Seq("nama", "descripta", "locata"))
-assert(tableFields == Seq("nama", "databasa", "descripta", "typa", false))
+assert(Seq(tableFields.apply(0), tableFields.apply(2), 
tableFields.apply(3),

Review Comment:
   ```suggestion
   assert(tableFields.toSeq == Seq(...))
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881180781


##
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##
@@ -299,15 +313,18 @@ class CatalogSuite extends SharedSparkSession with 
AnalysisTest {
 val functionFields = 
ScalaReflection.getConstructorParameterValues(function)
 val columnFields = ScalaReflection.getConstructorParameterValues(column)
 assert(dbFields == Seq("nama", "descripta", "locata"))
-assert(tableFields == Seq("nama", "databasa", "descripta", "typa", false))
+assert(Seq(tableFields.apply(0), tableFields.apply(2), 
tableFields.apply(3),

Review Comment:
   ```suggestion
   assert(Seq(tableFields(0), tableFields(2), tableFields(3),
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881180341


##
sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala:
##
@@ -64,12 +64,26 @@ class Database(
 @Stable
 class Table(
 val name: String,
-@Nullable val database: String,
+@Nullable val qualifier: Array[String],

Review Comment:
   After more thought, maybe 2 parameters are better here: `catalog: String, 
namespace: Array[String]`. The point is that temp views don't belong to 
any catalog, and setting `catalog` to null is clearer.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881179909


##
sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala:
##
@@ -64,12 +64,26 @@ class Database(
 @Stable
 class Table(

Review Comment:
   let's update the classdoc



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881179292


##
sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala:
##
@@ -64,12 +64,26 @@ class Database(
 @Stable
 class Table(
 val name: String,
-@Nullable val database: String,
+@Nullable val qualifier: Array[String],
 @Nullable val description: String,
 val tableType: String,
 val isTemporary: Boolean)
   extends DefinedByConstructorParams {
 
+  def database: String = parseQualifier
+
+  def parseQualifier: String = {
+if (qualifier == null) {
+  null
+} else if (qualifier.length == 2) {
+  qualifier(1)
+} else if (qualifier.length == 1) {

Review Comment:
   when will `qualifier` be length 1?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881179044


##
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala:
##
@@ -2187,6 +2187,11 @@ object QueryCompilationErrors extends QueryErrorsBase {
 new AnalysisException(s"Table or view '$tableName' not found in database 
'$dbName'")
   }
 
+  def tableOrViewNotFound(ident: Seq[String]): Throwable = {
+val fullName = ident.mkString(".")

Review Comment:
   ```suggestion
   val fullName = ident.quoted
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-24 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r881179189


##
sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala:
##
@@ -64,12 +64,26 @@ class Database(
 @Stable
 class Table(
 val name: String,
-@Nullable val database: String,
+@Nullable val qualifier: Array[String],
 @Nullable val description: String,
 val tableType: String,
 val isTemporary: Boolean)
   extends DefinedByConstructorParams {
 
+  def database: String = parseQualifier
+
+  def parseQualifier: String = {

Review Comment:
   ```suggestion
 def database: String = {
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-23 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r880060129


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -125,6 +135,31 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   isTemporary = isTemp)
   }
 
+  private def makeTable(ident: Seq[String]): Table = {
+val plan = UnresolvedTableOrView(ident, "Catalog.listTables", true)
+val node = sparkSession.sessionState.executePlan(plan).analyzed
+node match {
+  case t: ResolvedTable =>
+val isExternal = t.table.properties().getOrDefault("external", 
"false").equals("true")
+new Table(
+  name = t.identifier.name(),
+  database = t.identifier.namespace().head,
+  description = t.table.properties().get("comment"),
+  tableType =
+if (isExternal) CatalogTableType.EXTERNAL.name
+else CatalogTableType.MANAGED.name,
+  isTemporary = false)
+  case v: ResolvedView =>
+new Table(

Review Comment:
   add a new constructor to match the old class definition



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-23 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r880059787


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -117,14 +128,42 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   case NonFatal(_) => None
 }
 val isTemp = sessionCatalog.isTempView(tableIdent)
+val qualifier =
+  
Array(metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull)

Review Comment:
   shall we include catalog name (`spark_catalog`) as well? 



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-23 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r879230753


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -125,6 +135,31 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   isTemporary = isTemp)
   }
 
+  private def makeTable(ident: Seq[String]): Table = {
+val plan = UnresolvedTableOrView(ident, "Catalog.listTables", true)
+val node = sparkSession.sessionState.executePlan(plan).analyzed
+node match {
+  case t: ResolvedTable =>
+val isExternal = t.table.properties().getOrDefault("external", 
"false").equals("true")
+new Table(
+  name = t.identifier.name(),
+  database = t.identifier.namespace().head,
+  description = t.table.properties().get("comment"),
+  tableType =
+if (isExternal) CatalogTableType.EXTERNAL.name
+else CatalogTableType.MANAGED.name,
+  isTemporary = false)
+  case v: ResolvedView =>
+new Table(

Review Comment:
   if `qualifier.length > 2`, it's a new code path and I don't know what to 
report as a "database name". NULL seems reasonable here.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-20 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877869223


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -125,6 +135,31 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   isTemporary = isTemp)
   }
 
+  private def makeTable(ident: Seq[String]): Table = {
+val plan = UnresolvedTableOrView(ident, "Catalog.listTables", true)
+val node = sparkSession.sessionState.executePlan(plan).analyzed
+node match {
+  case t: ResolvedTable =>
+val isExternal = t.table.properties().getOrDefault("external", 
"false").equals("true")
+new Table(
+  name = t.identifier.name(),
+  database = t.identifier.namespace().head,
+  description = t.table.properties().get("comment"),
+  tableType =
+if (isExternal) CatalogTableType.EXTERNAL.name
+else CatalogTableType.MANAGED.name,
+  isTemporary = false)
+  case v: ResolvedView =>
+new Table(

Review Comment:
   We will hit similar problems in `listDatabases` and `listFunctions`



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-20 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877867840


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -125,6 +135,31 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   isTemporary = isTemp)
   }
 
+  private def makeTable(ident: Seq[String]): Table = {
+val plan = UnresolvedTableOrView(ident, "Catalog.listTables", true)
+val node = sparkSession.sessionState.executePlan(plan).analyzed
+node match {
+  case t: ResolvedTable =>
+val isExternal = t.table.properties().getOrDefault("external", 
"false").equals("true")
+new Table(
+  name = t.identifier.name(),
+  database = t.identifier.namespace().head,
+  description = t.table.properties().get("comment"),
+  tableType =
+if (isExternal) CatalogTableType.EXTERNAL.name
+else CatalogTableType.MANAGED.name,
+  isTemporary = false)
+  case v: ResolvedView =>
+new Table(

Review Comment:
   my major concern is this `Table` API. It needs to support n-part name but 
keep backward compatibility. I'm thinking about
   ```
   class Table(
 val name: String,
 val qualifier: Array[String],
 ...
   ) {
 def database: String = if (qualifier.length == 2) qualifier(1) else null
   }
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-20 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877867840


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -125,6 +135,31 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   isTemporary = isTemp)
   }
 
+  private def makeTable(ident: Seq[String]): Table = {
+val plan = UnresolvedTableOrView(ident, "Catalog.listTables", true)
+val node = sparkSession.sessionState.executePlan(plan).analyzed
+node match {
+  case t: ResolvedTable =>
+val isExternal = t.table.properties().getOrDefault("external", 
"false").equals("true")
+new Table(
+  name = t.identifier.name(),
+  database = t.identifier.namespace().head,
+  description = t.table.properties().get("comment"),
+  tableType =
+if (isExternal) CatalogTableType.EXTERNAL.name
+else CatalogTableType.MANAGED.name,
+  isTemporary = false)
+  case v: ResolvedView =>
+new Table(

Review Comment:
   my major concern is this `Table` API. It needs to support n-part name but 
keep backward compatibility. I'm thinking about
   ```
   class Table(
 val name: String,
 val qualifiers: Array[String],
 ...
   ) {
 def database: String = if (qualifiers.length == 2) qualifiers(1) else null
   }
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-20 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877861919


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -125,6 +135,31 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   isTemporary = isTemp)
   }
 
+  private def makeTable(ident: Seq[String]): Table = {
+val plan = UnresolvedTableOrView(ident, "Catalog.listTables", true)
+val node = sparkSession.sessionState.executePlan(plan).analyzed
+node match {
+  case t: ResolvedTable =>
+val isExternal = t.table.properties().getOrDefault("external", 
"false").equals("true")
+new Table(
+  name = t.identifier.name(),
+  database = t.identifier.namespace().head,
+  description = t.table.properties().get("comment"),
+  tableType =
+if (isExternal) CatalogTableType.EXTERNAL.name
+else CatalogTableType.MANAGED.name,
+  isTemporary = false)
+  case v: ResolvedView =>
+new Table(
+  name = v.identifier.name(),
+  database = v.identifier.namespace().toString,
+  description = "",
+  tableType = "",

Review Comment:
   looking at the previous behavior, I think this should be `if (v.isTemp) 
"TEMPORARY" else "VIEW"`



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-20 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877853440


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -125,6 +135,31 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   isTemporary = isTemp)
   }
 
+  private def makeTable(ident: Seq[String]): Table = {
+val plan = UnresolvedTableOrView(ident, "Catalog.listTables", true)
+val node = sparkSession.sessionState.executePlan(plan).analyzed
+node match {
+  case t: ResolvedTable =>
+val isExternal = t.table.properties().getOrDefault("external", 
"false").equals("true")

Review Comment:
   ```suggestion
   val isExternal = 
t.table.properties().getOrDefault(TableCatalog.PROP_EXTERNAL, 
"false").equals("true")
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-19 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877685094


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -367,24 +377,40 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   schema: StructType,
   description: String,
   options: Map[String, String]): DataFrame = {
-val tableIdent = 
sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
+val idents = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName)
 val storage = DataSource.buildStorageFormatFromOptions(options)
 val tableType = if (storage.locationUri.isDefined) {
   CatalogTableType.EXTERNAL
 } else {
   CatalogTableType.MANAGED
 }
-val tableDesc = CatalogTable(
-  identifier = tableIdent,
-  tableType = tableType,
-  storage = storage,
-  schema = schema,
-  provider = Some(source),
-  comment = { if (description.isEmpty) None else Some(description) }
-)
-val plan = CreateTable(tableDesc, SaveMode.ErrorIfExists, None)
+val location = if (storage.locationUri.isDefined) {
+  val locationStr = storage.locationUri.get.toString
+  Some(locationStr)
+} else {
+  None
+}
+
+val tableSpec =
+  TableSpec(
+properties = Map(),
+provider = Some(source),
+options = options,
+location = location,
+comment = { if (description.isEmpty) None else Some(description) },
+serde = None,
+external = tableType == CatalogTableType.EXTERNAL)
+
+val plan =
+  CreateTable(
+name = UnresolvedDBObjectName(idents, isNamespace = true),

Review Comment:
   `isNamespace` should be false?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-19 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877684804


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -367,24 +377,40 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   schema: StructType,
   description: String,
   options: Map[String, String]): DataFrame = {
-val tableIdent = 
sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
+val idents = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName)
 val storage = DataSource.buildStorageFormatFromOptions(options)
 val tableType = if (storage.locationUri.isDefined) {
   CatalogTableType.EXTERNAL
 } else {
   CatalogTableType.MANAGED
 }
-val tableDesc = CatalogTable(
-  identifier = tableIdent,
-  tableType = tableType,
-  storage = storage,
-  schema = schema,
-  provider = Some(source),
-  comment = { if (description.isEmpty) None else Some(description) }
-)
-val plan = CreateTable(tableDesc, SaveMode.ErrorIfExists, None)
+val location = if (storage.locationUri.isDefined) {
+  val locationStr = storage.locationUri.get.toString
+  Some(locationStr)
+} else {
+  None
+}
+
+val tableSpec =
+  TableSpec(
+properties = Map(),
+provider = Some(source),
+options = options,
+location = location,
+comment = { if (description.isEmpty) None else Some(description) },
+serde = None,
+external = tableType == CatalogTableType.EXTERNAL)
+
+val plan =
+  CreateTable(
+name = UnresolvedDBObjectName(idents, isNamespace = true),
+tableSchema = schema,
+partitioning = Seq(),
+tableSpec = tableSpec,
+ignoreIfExists = false)

Review Comment:
   ```suggestion
   val plan = CreateTable(
 name = UnresolvedDBObjectName(idents, isNamespace = true),
 tableSchema = schema,
 partitioning = Seq(),
 tableSpec = tableSpec,
 ignoreIfExists = false)
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-19 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877684610


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -367,24 +377,40 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   schema: StructType,
   description: String,
   options: Map[String, String]): DataFrame = {
-val tableIdent = 
sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
+val idents = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName)
 val storage = DataSource.buildStorageFormatFromOptions(options)
 val tableType = if (storage.locationUri.isDefined) {
   CatalogTableType.EXTERNAL
 } else {
   CatalogTableType.MANAGED
 }
-val tableDesc = CatalogTable(
-  identifier = tableIdent,
-  tableType = tableType,
-  storage = storage,
-  schema = schema,
-  provider = Some(source),
-  comment = { if (description.isEmpty) None else Some(description) }
-)
-val plan = CreateTable(tableDesc, SaveMode.ErrorIfExists, None)
+val location = if (storage.locationUri.isDefined) {
+  val locationStr = storage.locationUri.get.toString
+  Some(locationStr)
+} else {
+  None
+}
+
+val tableSpec =
+  TableSpec(
+properties = Map(),
+provider = Some(source),
+options = options,
+location = location,
+comment = { if (description.isEmpty) None else Some(description) },
+serde = None,
+external = tableType == CatalogTableType.EXTERNAL)

Review Comment:
   ```suggestion
   val tableSpec = TableSpec(
 properties = Map(),
 provider = Some(source),
 options = options,
 location = location,
 comment = { if (description.isEmpty) None else Some(description) },
 serde = None,
 external = tableType == CatalogTableType.EXTERNAL)
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-19 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877684350


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -367,24 +377,40 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
   schema: StructType,
   description: String,
   options: Map[String, String]): DataFrame = {
-val tableIdent = 
sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
+val idents = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName)

Review Comment:
   ```suggestion
   val ident = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName)
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-19 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877684235


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -97,8 +97,18 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
*/
   @throws[AnalysisException]("database does not exist")
   override def listTables(dbName: String): Dataset[Table] = {
-val tables = sessionCatalog.listTables(dbName).map(makeTable)
-CatalogImpl.makeDataset(tables, sparkSession)
+if (sessionCatalog.databaseExists(dbName)) {
+  val tables = sessionCatalog.listTables(dbName).map(makeTable)
+  CatalogImpl.makeDataset(tables, sparkSession)
+} else {
+  val multiParts = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(dbName)
+  val plan = ShowTables(UnresolvedNamespace(multiParts), None)
+  val ret = sparkSession.sessionState.executePlan(plan).toRdd.collect()
+  val tables = ret
+.map(row => TableIdentifier(row.getString(1), Some(row.getString(0
+.map(makeTable)

Review Comment:
   `makeTable` only looks up table from hive metastore. I think we need a new 
`makeTable` which takes `Seq[String]` and looks up table through analyzer.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org



[GitHub] [spark] cloud-fan commented on a diff in pull request #36586: [SPARK-39236][SQL] Make CreateTable and ListTables be compatible with 3 layer namespace

2022-05-19 Thread GitBox


cloud-fan commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r877682958


##
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##
@@ -97,8 +97,18 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
*/
   @throws[AnalysisException]("database does not exist")
   override def listTables(dbName: String): Dataset[Table] = {
-val tables = sessionCatalog.listTables(dbName).map(makeTable)
-CatalogImpl.makeDataset(tables, sparkSession)
+if (sessionCatalog.databaseExists(dbName)) {
+  val tables = sessionCatalog.listTables(dbName).map(makeTable)
+  CatalogImpl.makeDataset(tables, sparkSession)
+} else {
+  val multiParts = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(dbName)

Review Comment:
   ```suggestion
 val ident = 
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(dbName)
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


-
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org