cloud-fan commented on a change in pull request #30403:
URL: https://github.com/apache/spark/pull/30403#discussion_r530758190



##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
##########
@@ -51,32 +54,35 @@ case class CacheTableCommand(
 
     if (storageLevelValue.nonEmpty) {
       sparkSession.catalog.cacheTable(
-        tableIdent.quotedString, StorageLevel.fromString(storageLevelValue.get))
+        tableName, StorageLevel.fromString(storageLevelValue.get))
     } else {
-      sparkSession.catalog.cacheTable(tableIdent.quotedString)
+      sparkSession.catalog.cacheTable(tableName)
     }
 
     if (!isLazy) {
       // Performs eager caching
-      sparkSession.table(tableIdent).count()
+      sparkSession.table(tableName).count()
     }
 
     Seq.empty[Row]
   }
 }
 
-
 case class UncacheTableCommand(
-    tableIdent: TableIdentifier,
+    multipartIdentifier: Seq[String],
     ifExists: Boolean) extends RunnableCommand {
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    val tableId = tableIdent.quotedString
-    if (!ifExists || sparkSession.catalog.tableExists(tableId)) {

Review comment:
      This reminds me that we should check `CatalogImpl` and see whether n-part names are well supported. It seems like `tableExists` is broken...
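      A minimal sketch of how the concern could be checked (assuming a local session, a table named `default.t`, and the default session catalog name `spark_catalog`; none of these names come from the PR):

      ```scala
      import org.apache.spark.sql.SparkSession

      object TableExistsNPartCheck {
        def main(args: Array[String]): Unit = {
          val spark = SparkSession.builder()
            .master("local[*]")
            .appName("tableExists n-part check")
            .getOrCreate()

          // Hypothetical table used only for this check.
          spark.sql("CREATE TABLE default.t (id INT) USING parquet")

          // 2-part name: the path CatalogImpl was written for.
          println(spark.catalog.tableExists("default.t"))

          // 3-part name including the catalog: if CatalogImpl still parses the
          // argument as a 2-part TableIdentifier, this call may fail or return
          // false even though the table exists.
          println(spark.catalog.tableExists("spark_catalog.default.t"))

          spark.stop()
        }
      }
      ```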



