This is an automated email from the ASF dual-hosted git repository.
biyan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new f448fd1892 [spark] Fix rename table with catalog name (#5027)
f448fd1892 is described below
commit f448fd1892afe3b7929831faac268c269580136a
Author: Zouxxyy <[email protected]>
AuthorDate: Fri Feb 7 10:13:15 2025 +0800
[spark] Fix rename table with catalog name (#5027)
---
.../main/java/org/apache/paimon/spark/SparkCatalog.java | 6 +++++-
.../java/org/apache/paimon/spark/utils/CatalogUtils.java | 14 ++++++++++++++
.../scala/org/apache/paimon/spark/sql/DDLTestBase.scala | 14 ++++++++++++++
3 files changed, 33 insertions(+), 1 deletion(-)
diff --git a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
index f32b87603f..3790927dbf 100644
--- a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
+++ b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
@@ -73,6 +73,7 @@ import static org.apache.paimon.spark.SparkCatalogOptions.DEFAULT_DATABASE;
import static org.apache.paimon.spark.SparkTypeUtils.toPaimonType;
import static org.apache.paimon.spark.util.OptionUtils.copyWithSQLConf;
import static org.apache.paimon.spark.utils.CatalogUtils.checkNamespace;
+import static org.apache.paimon.spark.utils.CatalogUtils.removeCatalogName;
import static org.apache.paimon.spark.utils.CatalogUtils.toIdentifier;
/** Spark {@link TableCatalog} for paimon. */
@@ -427,7 +428,10 @@ public class SparkCatalog extends SparkBaseCatalog implements SupportFunction, S
public void renameTable(Identifier oldIdent, Identifier newIdent)
throws NoSuchTableException, TableAlreadyExistsException {
try {
-            catalog.renameTable(toIdentifier(oldIdent), toIdentifier(newIdent), false);
+ catalog.renameTable(
+ toIdentifier(oldIdent),
+ toIdentifier(removeCatalogName(newIdent, catalogName)),
+ false);
} catch (Catalog.TableNotExistException e) {
throw new NoSuchTableException(oldIdent);
} catch (Catalog.TableAlreadyExistException e) {
diff --git a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/utils/CatalogUtils.java b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/utils/CatalogUtils.java
index fca9df210e..da2bcc10a2 100644
--- a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/utils/CatalogUtils.java
+++ b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/utils/CatalogUtils.java
@@ -38,4 +38,18 @@ public class CatalogUtils {
checkNamespace(ident.namespace());
        return new org.apache.paimon.catalog.Identifier(ident.namespace()[0], ident.name());
}
+
+    public static Identifier removeCatalogName(Identifier ident, String catalogName) {
+ String[] namespace = ident.namespace();
+ if (namespace.length > 1) {
+ checkArgument(
+ namespace[0].equals(catalogName),
+                    "Only supports operations within the same catalog, target catalog name: %s, current catalog name: %s",
+ namespace[0],
+ catalogName);
+            return Identifier.of(Arrays.copyOfRange(namespace, 1, namespace.length), ident.name());
+ } else {
+ return ident;
+ }
+ }
}
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLTestBase.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLTestBase.scala
index 3ed2c98306..8ce0e178eb 100644
--- a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLTestBase.scala
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLTestBase.scala
@@ -572,4 +572,18 @@ abstract class DDLTestBase extends PaimonSparkTestBase {
}
}
}
+
+ test("Paimon DDL: rename table with catalog name") {
+ sql("USE default")
+ withTable("t1", "t2") {
+ sql("CREATE TABLE t1 (id INT) USING paimon")
+ sql("INSERT INTO t1 VALUES 1")
+ sql("ALTER TABLE paimon.default.t1 RENAME TO paimon.default.t2")
+ checkAnswer(sql("SELECT * FROM t2"), Row(1))
+
+ assert(intercept[Exception] {
+ sql("ALTER TABLE paimon.default.t2 RENAME TO spark_catalog.default.t2")
+ }.getMessage.contains("Only supports operations within the same
catalog"))
+ }
+ }
}