This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new ecc0c51d62 [spark] SparkGenericCatalog support default value (#5881)
ecc0c51d62 is described below
commit ecc0c51d62a4915b65c6717cdb6e8255e00a367c
Author: Zouxxyy <[email protected]>
AuthorDate: Sun Jul 13 10:01:51 2025 +0800
[spark] SparkGenericCatalog support default value (#5881)
---
.../connector/catalog/TableCatalogCapability.java | 51 +++++++++++++++++++++
.../connector/catalog/TableCatalogCapability.java | 52 ----------------------
.../java/org/apache/paimon/spark/SparkCatalog.java | 7 ---
.../paimon/spark/catalog/SparkBaseCatalog.java | 10 +++++
.../spark/sql/DDLWithHiveCatalogTestBase.scala | 27 +++++++++++
5 files changed, 88 insertions(+), 59 deletions(-)
diff --git
a/paimon-spark/paimon-spark-3.2/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalogCapability.java
b/paimon-spark/paimon-spark-3.2/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalogCapability.java
new file mode 100644
index 0000000000..94fde80128
--- /dev/null
+++
b/paimon-spark/paimon-spark-3.2/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalogCapability.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.connector.catalog;
+
+/** Capabilities that can be provided by a {@link TableCatalog}
implementation. */
+public enum TableCatalogCapability {
+
+ /**
+ * Signals that the TableCatalog supports defining generated columns upon
table creation in SQL.
+ *
+ * <p>Without this capability, any create/replace table statements with a
generated column
+ * defined in the table schema will throw an exception during analysis.
+ *
+ * <p>A generated column is defined with syntax: {@code colName colType
GENERATED ALWAYS AS
+ * (expr)}
+ *
+ * <p>Generation expressions are included in the column definition for APIs
like {@link
+ * TableCatalog#createTable}.
+ */
+ SUPPORTS_CREATE_TABLE_WITH_GENERATED_COLUMNS,
+
+ /**
+ * Signals that the TableCatalog supports defining column default value as
expression in
+ * CREATE/REPLACE/ALTER TABLE.
+ *
+ * <p>Without this capability, any CREATE/REPLACE/ALTER TABLE statement
with a column default
+ * value defined in the table schema will throw an exception during
analysis.
+ *
+ * <p>A column default value is defined with syntax: {@code colName
colType DEFAULT expr}
+ *
+ * <p>Column default value expression is included in the column definition
for APIs like {@link
+ * TableCatalog#createTable}.
+ */
+ SUPPORT_COLUMN_DEFAULT_VALUE
+}
diff --git
a/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/connector/catalog/TableCatalogCapability.java
b/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/connector/catalog/TableCatalogCapability.java
deleted file mode 100644
index ce375ff56d..0000000000
---
a/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/spark/sql/connector/catalog/TableCatalogCapability.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.connector.catalog;
-
-/**
- * Capabilities that can be provided by a {@link TableCatalog} implementation.
- */
-public enum TableCatalogCapability {
-
- /**
- * Signals that the TableCatalog supports defining generated columns upon
table creation in SQL.
- * <p>
- * Without this capability, any create/replace table statements with a
generated column defined
- * in the table schema will throw an exception during analysis.
- * <p>
- * A generated column is defined with syntax: {@code colName colType
GENERATED ALWAYS AS (expr)}
- * <p>
- * Generation expression are included in the column definition for APIs like
- * {@link TableCatalog#createTable}.
- */
- SUPPORTS_CREATE_TABLE_WITH_GENERATED_COLUMNS,
-
- /**
- * Signals that the TableCatalog supports defining column default value as
expression in
- * CREATE/REPLACE/ALTER TABLE.
- * <p>
- * Without this capability, any CREATE/REPLACE/ALTER TABLE statement with a
column default value
- * defined in the table schema will throw an exception during analysis.
- * <p>
- * A column default value is defined with syntax: {@code colName colType
DEFAULT expr}
- * <p>
- * Column default value expression is included in the column definition for
APIs like
- * {@link TableCatalog#createTable}.
- */
- SUPPORT_COLUMN_DEFAULT_VALUE
-}
diff --git
a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
index d910ade8fa..94657d4a8d 100644
---
a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
+++
b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
@@ -51,7 +51,6 @@ import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.NamespaceChange;
import org.apache.spark.sql.connector.catalog.SupportsNamespaces;
import org.apache.spark.sql.connector.catalog.TableCatalog;
-import org.apache.spark.sql.connector.catalog.TableCatalogCapability;
import org.apache.spark.sql.connector.catalog.TableChange;
import org.apache.spark.sql.connector.catalog.functions.UnboundFunction;
import org.apache.spark.sql.connector.expressions.FieldReference;
@@ -79,7 +78,6 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.paimon.CoreOptions.FILE_FORMAT;
@@ -96,7 +94,6 @@ import static
org.apache.paimon.spark.utils.CatalogUtils.isUpdateColumnDefaultVa
import static org.apache.paimon.spark.utils.CatalogUtils.removeCatalogName;
import static org.apache.paimon.spark.utils.CatalogUtils.toIdentifier;
import static
org.apache.paimon.spark.utils.CatalogUtils.toUpdateColumnDefaultValue;
-import static
org.apache.spark.sql.connector.catalog.TableCatalogCapability.SUPPORT_COLUMN_DEFAULT_VALUE;
/** Spark {@link TableCatalog} for paimon. */
public class SparkCatalog extends SparkBaseCatalog
@@ -111,10 +108,6 @@ public class SparkCatalog extends SparkBaseCatalog
private String defaultDatabase;
- public Set<TableCatalogCapability> capabilities() {
- return Collections.singleton(SUPPORT_COLUMN_DEFAULT_VALUE);
- }
-
@Override
public void initialize(String name, CaseInsensitiveStringMap options) {
checkRequiredConfigurations();
diff --git
a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/SparkBaseCatalog.java
b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/SparkBaseCatalog.java
index d3d173c903..1cb3035fad 100644
---
a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/SparkBaseCatalog.java
+++
b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/SparkBaseCatalog.java
@@ -28,9 +28,15 @@ import org.apache.paimon.spark.procedure.ProcedureBuilder;
import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.SupportsNamespaces;
import org.apache.spark.sql.connector.catalog.TableCatalog;
+import org.apache.spark.sql.connector.catalog.TableCatalogCapability;
import javax.annotation.Nullable;
+import java.util.Collections;
+import java.util.Set;
+
+import static
org.apache.spark.sql.connector.catalog.TableCatalogCapability.SUPPORT_COLUMN_DEFAULT_VALUE;
+
/** Spark base catalog. */
public abstract class SparkBaseCatalog
implements TableCatalog, SupportsNamespaces, ProcedureCatalog,
WithPaimonCatalog {
@@ -42,6 +48,10 @@ public abstract class SparkBaseCatalog
return catalogName;
}
+ public Set<TableCatalogCapability> capabilities() {
+ return Collections.singleton(SUPPORT_COLUMN_DEFAULT_VALUE);
+ }
+
@Override
public Procedure loadProcedure(Identifier identifier) throws
NoSuchProcedureException {
if (Catalog.SYSTEM_DATABASE_NAME.equals(identifier.namespace()[0])) {
diff --git
a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
index 06c2eaf049..2a8b8e9b39 100644
---
a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
+++
b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
@@ -731,6 +731,33 @@ abstract class DDLWithHiveCatalogTestBase extends
PaimonHiveTestBase {
}
}
+ test("Paimon DDL with hive catalog: default value") {
+ // Spark support default value since 3.4
+ if (gteqSpark3_4) {
+ Seq(sparkCatalogName, paimonHiveCatalogName).foreach {
+ catalogName =>
+ spark.sql(s"USE $catalogName")
+ val databaseName = "paimon_db"
+ withDatabase(databaseName) {
+ withTable("t1", "t2") {
+ // test paimon table
+ spark.sql("CREATE TABLE t1 (a INT, b int default 3) using
paimon")
+ spark.sql("INSERT INTO t1 (a) VALUES (1)")
+ checkAnswer(spark.sql("SELECT * FROM t1"), Seq(Row(1, 3)))
+
+ // test non paimon table
+ // todo: support default value for paimon format table
+ if (catalogName != paimonHiveCatalogName) {
+ spark.sql("CREATE TABLE t2 (a INT, b int default 3) using csv")
+ spark.sql("INSERT INTO t2 (a) VALUES (1)")
+ checkAnswer(spark.sql("SELECT * FROM t2"), Seq(Row(1, 3)))
+ }
+ }
+ }
+ }
+ }
+ }
+
def getDatabaseProp(dbName: String, propertyName: String): String = {
spark
.sql(s"DESC DATABASE EXTENDED $dbName")