This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new a47f77f0d [hive] Rename hive external table should not change table path (#4562)
a47f77f0d is described below
commit a47f77f0dfb077a3e7628a7c8dc9c2df19cc6e81
Author: Zouxxyy <[email protected]>
AuthorDate: Thu Nov 21 15:34:22 2024 +0800
[hive] Rename hive external table should not change table path (#4562)
---
.../java/org/apache/paimon/hive/HiveCatalog.java | 10 ++--
.../spark/sql/DDLWithHiveCatalogTestBase.scala | 54 ++++++++++++++++++++++
2 files changed, 60 insertions(+), 4 deletions(-)
diff --git a/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java b/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java
index 93e7e87ef..e93658732 100644
--- a/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java
+++ b/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java
@@ -792,13 +792,15 @@ public class HiveCatalog extends AbstractCatalog {
@Override
protected void renameTableImpl(Identifier fromTable, Identifier toTable) {
try {
- Table table = renameHiveTable(fromTable, toTable);
-
+ // Get fromTable's location before rename
Path fromPath = getTableLocation(fromTable);
- if (!new SchemaManager(fileIO, fromPath).listAllIds().isEmpty()) {
+ Table table = renameHiveTable(fromTable, toTable);
+ Path toPath = getTableLocation(toTable);
+ if (!isExternalTable(table)
+ && !fromPath.equals(toPath)
+ && !new SchemaManager(fileIO, fromPath).listAllIds().isEmpty()) {
// Rename the file system's table directory. Maintain consistency between tables in
// the file system and tables in the Hive Metastore.
- Path toPath = getTableLocation(toTable);
try {
fileIO.rename(fromPath, toPath);
} catch (IOException e) {
diff --git a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
index bfd6716b2..9be8e21a8 100644
--- a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
+++ b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
@@ -347,6 +347,60 @@ abstract class DDLWithHiveCatalogTestBase extends PaimonHiveTestBase {
}
}
+ test("Paimon DDL with hive catalog: rename external / managed table") {
+ Seq(sparkCatalogName, paimonHiveCatalogName).foreach {
+ catalogName =>
+ spark.sql(s"USE $catalogName")
+ withTempDir {
+ tbLocation =>
+ withDatabase("paimon_db") {
+ spark.sql(s"CREATE DATABASE paimon_db")
+ spark.sql(s"USE paimon_db")
+ withTable(
+ "external_tbl",
+ "managed_tbl",
+ "external_tbl_renamed",
+ "managed_tbl_renamed") {
+ val expertTbLocation = tbLocation.getCanonicalPath
+ // create external table
+ spark.sql(
s"CREATE TABLE external_tbl (id INT) USING paimon LOCATION '$expertTbLocation'")
+ spark.sql("INSERT INTO external_tbl VALUES (1)")
+ val actualTbLocation = loadTable("paimon_db", "external_tbl").location()
+ assert(actualTbLocation.toString.split(':').apply(1).equals(expertTbLocation))
+
+ // rename external table, location should not change
+ spark.sql("ALTER TABLE external_tbl RENAME TO external_tbl_renamed")
+ checkAnswer(spark.sql("SELECT * FROM external_tbl_renamed"), Row(1))
+ assert(
+ loadTable("paimon_db", "external_tbl_renamed")
+ .location()
+ .toString
+ .split(':')
+ .apply(1)
+ .equals(expertTbLocation))
+
+ // create managed table
+ spark.sql(s"CREATE TABLE managed_tbl (id INT) USING paimon")
+ spark.sql("INSERT INTO managed_tbl VALUES (1)")
+ val managedTbLocation = loadTable("paimon_db", "managed_tbl").location()
+
+ // rename managed table, location should change
+ spark.sql("ALTER TABLE managed_tbl RENAME TO managed_tbl_renamed")
+ checkAnswer(spark.sql("SELECT * FROM managed_tbl_renamed"), Row(1))
+ assert(
+ !loadTable("paimon_db", "managed_tbl_renamed")
+ .location()
+ .toString
+ .split(':')
+ .apply(1)
+ .equals(managedTbLocation.toString))
+ }
+ }
+ }
+ }
+ }
+
def getDatabaseProp(dbName: String, propertyName: String): String = {
spark
.sql(s"DESC DATABASE EXTENDED $dbName")