This is an automated email from the ASF dual-hosted git repository. wenchen pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new a22d20a [SPARK-34468][SQL] Rename v2 table in place if new name has single part a22d20a is described below commit a22d20a6ca6e763b3c6011d6019ab92a7f54ea87 Author: Max Gekk <max.g...@gmail.com> AuthorDate: Mon Feb 22 04:43:19 2021 +0000 [SPARK-34468][SQL] Rename v2 table in place if new name has single part ### What changes were proposed in this pull request? If the new table name consists of a single part (no namespaces), the v2 `ALTER TABLE .. RENAME TO` command renames the table while keeping it in the same namespace. For example: ```sql ALTER TABLE catalog_name.ns1.ns2.ns3.ns4.ns5.tbl RENAME TO new_table ``` the command should rename the source table to `catalog_name.ns1.ns2.ns3.ns4.ns5.new_table`. Before the changes, the command moved the table to the "root" namespace, i.e. `catalog_name.new_table`. ### Why are the changes needed? To have the same behavior as the v1 implementation of `ALTER TABLE .. RENAME TO`, and other DBMSs. ### Does this PR introduce _any_ user-facing change? Yes ### How was this patch tested? By running the new test: ``` $ build/sbt "sql/test:testOnly *DataSourceV2SQLSuite" ``` Closes #31594 from MaxGekk/rename-table-single-part. 
Authored-by: Max Gekk <max.g...@gmail.com> Signed-off-by: Wenchen Fan <wenc...@databricks.com> --- .../sql/execution/datasources/v2/RenameTableExec.scala | 16 ++++++++++++---- .../spark/sql/connector/DataSourceV2SQLSuite.scala | 14 ++++++++++++++ 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/RenameTableExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/RenameTableExec.scala index a71dd33..e44ad64 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/RenameTableExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/RenameTableExec.scala @@ -43,12 +43,20 @@ case class RenameTableExec( val optOldStorageLevel = invalidateCache() catalog.invalidateTable(oldIdent) - catalog.renameTable(oldIdent, newIdent) + // If new identifier consists of a table name only, the table should be renamed in place. + // Such behavior matches to the v1 implementation of table renaming in Spark and other DBMSs. 
+ val qualifiedNewIdent = if (newIdent.namespace.isEmpty) { + Identifier.of(oldIdent.namespace, newIdent.name) + } else newIdent + catalog.renameTable(oldIdent, qualifiedNewIdent) optOldStorageLevel.foreach { oldStorageLevel => - val tbl = catalog.loadTable(newIdent) - val newRelation = DataSourceV2Relation.create(tbl, Some(catalog), Some(newIdent)) - cacheTable(sqlContext.sparkSession, newRelation, Some(newIdent.quoted), oldStorageLevel) + val tbl = catalog.loadTable(qualifiedNewIdent) + val newRelation = DataSourceV2Relation.create(tbl, Some(catalog), Some(qualifiedNewIdent)) + cacheTable( + sqlContext.sparkSession, + newRelation, + Some(qualifiedNewIdent.quoted), oldStorageLevel) } Seq.empty } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala index 7d67e1c..5be1fa1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala @@ -2585,6 +2585,20 @@ class DataSourceV2SQLSuite } } + test("SPARK-34468: rename table in place when the destination name has single part") { + val tbl = s"${catalogAndNamespace}src_tbl" + withTable(tbl) { + sql(s"CREATE TABLE $tbl (c0 INT) USING $v2Format") + sql(s"INSERT INTO $tbl SELECT 0") + checkAnswer(sql(s"SHOW TABLES FROM testcat.ns1.ns2 LIKE 'new_tbl'"), Nil) + sql(s"ALTER TABLE $tbl RENAME TO new_tbl") + checkAnswer( + sql(s"SHOW TABLES FROM testcat.ns1.ns2 LIKE 'new_tbl'"), + Row("ns1.ns2", "new_tbl", false)) + checkAnswer(sql(s"SELECT c0 FROM ${catalogAndNamespace}new_tbl"), Row(0)) + } + } + private def testNotSupportedV2Command(sqlCommand: String, sqlParams: String): Unit = { val e = intercept[AnalysisException] { sql(s"$sqlCommand $sqlParams") --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For 
additional commands, e-mail: commits-h...@spark.apache.org