This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new 1a1ff56beb [spark] Fix NPE in spark truncate null partitions
1a1ff56beb is described below

commit 1a1ff56bebc3462682cc8afca2e23e27b2d9b26e
Author: zhongyujiang <[email protected]>
AuthorDate: Sun Nov 23 12:00:04 2025 +0800

    [spark] Fix NPE in spark truncate null partitions
    
    Part of #6662
---
 .../spark/catalyst/analysis/PaimonIncompatiblePHRRules.scala |  5 ++++-
 .../test/java/org/apache/paimon/spark/SparkWriteITCase.java  | 12 ++++++++++++
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonIncompatiblePHRRules.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonIncompatiblePHRRules.scala
index c5745f5751..bf6eb35757 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonIncompatiblePHRRules.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonIncompatiblePHRRules.scala
@@ -40,7 +40,10 @@ case class PaimonIncompatiblePHRRules(session: SparkSession) extends Rule[Logica
             val field = schema.find(f => resolver(f.name, name)).getOrElse {
              throw new RuntimeException(s"$name is not a valid partition column in $schema.")
             }
-            (name -> ident.get(index, field.dataType).toString)
+
+            val partVal: String =
+              if (ident.isNullAt(index)) null else ident.get(index, field.dataType).toString
+            (name -> partVal)
         }.toMap
         PaimonTruncateTableCommand(table, partitionSpec)
 
diff --git a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkWriteITCase.java b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkWriteITCase.java
index d03172fcb8..2573179ccc 100644
--- a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkWriteITCase.java
+++ b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkWriteITCase.java
@@ -479,6 +479,18 @@ public class SparkWriteITCase {
         assertThat(rows.toString()).isEqualTo("[[2,22,222,b], [3,33,333,b]]");
     }
 
+    @Test
+    public void testTruncatePartitionValueNull() {
+        spark.sql("CREATE TABLE t (pt STRING, data STRING) PARTITIONED BY (pt) 
");
+
+        spark.sql("INSERT INTO t VALUES('1', 'a'), (null, 'b')");
+
+        spark.sql("TRUNCATE TABLE T PARTITION (pt = null)");
+
+        List<Row> rows = spark.sql("SELECT * FROM T ORDER BY pt").collectAsList();
+        assertThat(rows.toString()).isEqualTo("[[1,a]]");
+    }
+
     @Test
     public void testWriteDynamicBucketPartitionedTable() {
         spark.sql(

Reply via email to