This is an automated email from the ASF dual-hosted git repository. gurwls223 pushed a commit to branch branch-3.0 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push: new 94027d4 [SPARK-31407][SQL][TEST] TestHiveQueryExecution should respect database when creating table 94027d4 is described below commit 94027d47aadb0329650f8ac12ba8d8823a8b9e8e Author: yi.wu <yi...@databricks.com> AuthorDate: Mon Apr 13 19:04:36 2020 +0900 [SPARK-31407][SQL][TEST] TestHiveQueryExecution should respect database when creating table ### What changes were proposed in this pull request? In `TestHiveQueryExecution`, if we detect a database in the referenced table, we should create the table under that database. ### Why are the changes needed? This fixes the test `Fix hive/SQLQuerySuite.derived from Hive query file: drop_database_removes_partition_dirs.q`, which currently only passes when we run it with the whole test suite but fails when run separately. ### Does this PR introduce any user-facing change? No. ### How was this patch tested? Ran the test separately and together with the whole test suite. Closes #28177 from Ngone51/fix_derived. 
Authored-by: yi.wu <yi...@databricks.com> Signed-off-by: HyukjinKwon <gurwls...@apache.org> (cherry picked from commit 4de8ae1a0f74e24e76ea337285bf5a6aa2a6af29) Signed-off-by: HyukjinKwon <gurwls...@apache.org> --- .../org/apache/spark/sql/hive/test/TestHive.scala | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala index 868bc27..b9f10c3 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala @@ -40,6 +40,7 @@ import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation import org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener import org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation} +import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._ import org.apache.spark.sql.execution.{QueryExecution, SQLExecution} import org.apache.spark.sql.execution.command.CacheTableCommand import org.apache.spark.sql.hive._ @@ -588,20 +589,29 @@ private[hive] class TestHiveQueryExecution( override lazy val analyzed: LogicalPlan = { val describedTables = logical match { - case CacheTableCommand(tbl, _, _, _) => tbl.table :: Nil + case CacheTableCommand(tbl, _, _, _) => tbl :: Nil case _ => Nil } // Make sure any test tables referenced are loaded. 
val referencedTables = describedTables ++ - logical.collect { case UnresolvedRelation(ident) => ident.last } + logical.collect { case UnresolvedRelation(ident) => ident.asTableIdentifier } val resolver = sparkSession.sessionState.conf.resolver - val referencedTestTables = sparkSession.testTables.keys.filter { testTable => - referencedTables.exists(resolver(_, testTable)) + val referencedTestTables = referencedTables.flatMap { tbl => + val testTableOpt = sparkSession.testTables.keys.find(resolver(_, tbl.table)) + testTableOpt.map(testTable => tbl.copy(table = testTable)) + } + logDebug(s"Query references test tables: ${referencedTestTables.map(_.table).mkString(", ")}") + referencedTestTables.foreach { tbl => + val curDB = sparkSession.catalog.currentDatabase + try { + tbl.database.foreach(db => sparkSession.catalog.setCurrentDatabase(db)) + sparkSession.loadTestTable(tbl.table) + } finally { + tbl.database.foreach(_ => sparkSession.catalog.setCurrentDatabase(curDB)) + } } - logDebug(s"Query references test tables: ${referencedTestTables.mkString(", ")}") - referencedTestTables.foreach(sparkSession.loadTestTable) // Proceed with analysis. sparkSession.sessionState.analyzer.executeAndCheck(logical, tracker) } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org