This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new 0c994759d8 [spark] Add version check for v2 write (#6809)
0c994759d8 is described below

commit 0c994759d8ffc38f61bf12cd79a5d4bae7cef40f
Author: Kerwin Zhang <[email protected]>
AuthorDate: Sun Dec 14 20:17:45 2025 +0800

    [spark] Add version check for v2 write (#6809)
---
 .../scala/org/apache/paimon/spark/util/OptionUtils.scala  | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/util/OptionUtils.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/util/OptionUtils.scala
index 3a09919f85..acbb378586 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/util/OptionUtils.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/util/OptionUtils.scala
@@ -23,6 +23,7 @@ import org.apache.paimon.options.ConfigOption
 import org.apache.paimon.spark.{SparkCatalogOptions, SparkConnectorOptions}
 import org.apache.paimon.table.Table
 
+import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.SQLConfHelper
 import org.apache.spark.sql.internal.StaticSQLConf
 
@@ -31,7 +32,7 @@ import java.util.regex.Pattern
 
 import scala.collection.JavaConverters._
 
-object OptionUtils extends SQLConfHelper {
+object OptionUtils extends SQLConfHelper with Logging {
 
   private val PAIMON_OPTION_PREFIX = "spark.paimon."
   private val SPARK_CATALOG_PREFIX = "spark.sql.catalog."
@@ -76,12 +77,22 @@ object OptionUtils extends SQLConfHelper {
 
   def useV2Write(): Boolean = {
     val defaultValue = getSparkVersionSpecificDefault(SparkConnectorOptions.USE_V2_WRITE)
-    conf
+    val configuredValue = conf
       .getConfString(
         s"$PAIMON_OPTION_PREFIX${SparkConnectorOptions.USE_V2_WRITE.key()}",
         defaultValue
       )
       .toBoolean
+
+    val sparkVersion = org.apache.spark.SPARK_VERSION
+    val isVersionSupported = sparkVersion >= "3.4"
+
+    if (configuredValue && !isVersionSupported) {
+      logWarning(
+        "DataSourceV2 write is not supported in Spark versions prior to 3.4. Falling back to DataSourceV1.")
+    }
+
+    configuredValue && isVersionSupported
   }
 
   def writeMergeSchemaEnabled(): Boolean = {

Reply via email to