cloud-fan commented on code in PR #36968: URL: https://github.com/apache/spark/pull/36968#discussion_r905773687
########## sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala: ########## @@ -243,7 +243,29 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog { * `Database` can be found. */ override def getDatabase(dbName: String): Database = { - makeDatabase(dbName) + // `dbName` could be either a single database name (behavior in Spark 3.3 and prior) or a + // qualified namespace with catalog name. To maintain backwards compatibility, we first assume + // it's a single database name and return the database from sessionCatalog if it exists. + // Otherwise we try 3-part name parsing and locate the database. If the parsed identifier + // contains both catalog name and database name, we then search the database in the catalog. + if (sessionCatalog.databaseExists(dbName) || sessionCatalog.isGlobalTempViewDB(dbName)) { + makeDatabase(dbName) + } else { + val ident = sparkSession.sessionState.sqlParser.parseMultipartIdentifier(dbName) + val plan = UnresolvedNamespace(ident) + val resolved = sparkSession.sessionState.executePlan(plan).analyzed + val db = ident.tail + val metadata = resolved match { + case ResolvedNamespace(catalog: SupportsNamespaces, _) => + catalog.loadNamespaceMetadata(db.toArray) + // TODO what to do if it doesn't support namespaces + case _ => throw new RuntimeException(s"unexpected catalog resolved: $resolved") Review Comment: Let's follow `databaseExists`: if the catalog doesn't support namespace, we assume it's an implicit namespace, which exists but has no metadata. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org