This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new fb073429c6 [spark] Wrap the corresponding table to ensure it guarantees its capabilities (#6607)
fb073429c6 is described below

commit fb073429c6a202ff11ed8a5d07f1a120afe813cf
Author: Zouxxyy <[email protected]>
AuthorDate: Sun Nov 16 21:56:31 2025 +0800

    [spark] Wrap the corresponding table to ensure it guarantees its capabilities (#6607)
---
 .../main/java/org/apache/paimon/spark/SparkCatalog.java | 17 +++++++++++++----
 .../main/scala/org/apache/paimon/spark/BaseTable.scala  |  9 +++++----
 .../main/scala/org/apache/paimon/spark/SparkTable.scala |  6 ++++++
 3 files changed, 24 insertions(+), 8 deletions(-)

diff --git a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
index 46162aabd6..d794ac7b99 100644
--- a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
+++ b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkCatalog.java
@@ -38,6 +38,9 @@ import org.apache.paimon.spark.catalog.functions.PaimonFunctions;
 import org.apache.paimon.spark.catalog.functions.V1FunctionConverter;
 import org.apache.paimon.spark.utils.CatalogUtils;
 import org.apache.paimon.table.FormatTable;
+import org.apache.paimon.table.iceberg.IcebergTable;
+import org.apache.paimon.table.lance.LanceTable;
+import org.apache.paimon.table.object.ObjectTable;
 import org.apache.paimon.types.BlobType;
 import org.apache.paimon.types.DataField;
 import org.apache.paimon.types.DataType;
@@ -649,13 +652,19 @@ public class SparkCatalog extends SparkBaseCatalog
             Identifier ident, Map<String, String> extraOptions) throws NoSuchTableException {
         try {
             org.apache.paimon.catalog.Identifier tblIdent = toIdentifier(ident, catalogName);
-            org.apache.paimon.table.Table paimonTable =
+            org.apache.paimon.table.Table table =
                     copyWithSQLConf(
                             catalog.getTable(tblIdent), catalogName, tblIdent, extraOptions);
-            if (paimonTable instanceof FormatTable) {
-                return toSparkFormatTable(ident, (FormatTable) paimonTable);
+            if (table instanceof FormatTable) {
+                return toSparkFormatTable(ident, (FormatTable) table);
+            } else if (table instanceof IcebergTable) {
+                return new SparkIcebergTable(table);
+            } else if (table instanceof LanceTable) {
+                return new SparkLanceTable(table);
+            } else if (table instanceof ObjectTable) {
+                return new SparkObjectTable(table);
             } else {
-                return new SparkTable(paimonTable);
+                return new SparkTable(table);
             }
         } catch (Catalog.TableNotExistException e) {
             throw new NoSuchTableException(ident);
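
For illustration only (not part of the patch): the dispatch this hunk adds to
loadTable, restated as a minimal Scala sketch. The imports mirror the ones the
hunk adds above; the FormatTable branch is elided because it is unchanged.

    import org.apache.paimon.table.{FormatTable, Table}
    import org.apache.paimon.table.iceberg.IcebergTable
    import org.apache.paimon.table.lance.LanceTable
    import org.apache.paimon.table.object.ObjectTable

    // Each non-native table kind now gets a dedicated Spark wrapper instead
    // of the general-purpose SparkTable; the order matches the Java code.
    def wrap(table: Table): org.apache.spark.sql.connector.catalog.Table =
      table match {
        case _: FormatTable  => ??? // toSparkFormatTable path, unchanged here
        case _: IcebergTable => SparkIcebergTable(table)
        case _: LanceTable   => SparkLanceTable(table)
        case _: ObjectTable  => SparkObjectTable(table)
        case _               => SparkTable(table)
      }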
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/BaseTable.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/BaseTable.scala
index e7521cd409..9ffa6b194d 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/BaseTable.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/BaseTable.scala
@@ -21,10 +21,11 @@ package org.apache.paimon.spark
 import org.apache.paimon.table.Table
 import org.apache.paimon.utils.StringUtils
 
+import org.apache.spark.sql.connector.catalog.TableCapability
 import org.apache.spark.sql.connector.expressions.{Expressions, Transform}
 import org.apache.spark.sql.types.StructType
 
-import java.util.{Map => JMap}
+import java.util.{Collections => JCollections, Map => JMap, Set => JSet}
 
 import scala.collection.JavaConverters._
 
@@ -34,6 +35,8 @@ abstract class BaseTable
 
   val table: Table
 
+  override def capabilities(): JSet[TableCapability] = JCollections.emptySet[TableCapability]()
+
   override def name: String = table.fullName
 
   override lazy val schema: StructType = SparkTypeUtils.fromPaimonRowType(table.rowType)
@@ -42,9 +45,7 @@ abstract class BaseTable
     table.partitionKeys().asScala.map(p => Expressions.identity(StringUtils.quote(p))).toArray
   }
 
-  override def properties: JMap[String, String] = {
-    table.options()
-  }
+  override def properties: JMap[String, String] = table.options()
 
   override def toString: String = {
     s"${table.getClass.getSimpleName}[${table.fullName()}]"
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala
index 284426b615..69f134196f 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/SparkTable.scala
@@ -22,3 +22,9 @@ import org.apache.paimon.table.Table
 
 /** A spark [[org.apache.spark.sql.connector.catalog.Table]] for paimon. */
 case class SparkTable(override val table: Table) extends PaimonSparkTableBase(table) {}
+
+case class SparkIcebergTable(table: Table) extends BaseTable
+
+case class SparkLanceTable(table: Table) extends BaseTable
+
+case class SparkObjectTable(table: Table) extends BaseTable
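
A quick REPL-style check of the new wrappers (the icebergTable value is
hypothetical; any resolved org.apache.paimon.table.Table of that kind would do):

    val wrapped = SparkIcebergTable(icebergTable) // hypothetical input value
    // Inherited from BaseTable: metadata still works, capabilities are empty.
    assert(wrapped.name == icebergTable.fullName)
    assert(wrapped.capabilities().isEmpty)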
