This is an automated email from the ASF dual-hosted git repository.

agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new ea5dc31f6 fix: handle type mismatches in native c2r conversion (#3583)
ea5dc31f6 is described below

commit ea5dc31f684e7d3384bbbe610e37396807b1fec8
Author: Andy Grove <[email protected]>
AuthorDate: Tue Feb 24 16:02:48 2026 -0700

    fix: handle type mismatches in native c2r conversion (#3583)
---
 native/core/src/execution/columnar_to_row.rs             | 14 +++++++++++++-
 .../spark/sql/comet/CometNativeColumnarToRowExec.scala   |  5 -----
 .../test/scala/org/apache/comet/CometFuzzTestBase.scala  | 16 +++++++++-------
 3 files changed, 22 insertions(+), 13 deletions(-)

diff --git a/native/core/src/execution/columnar_to_row.rs b/native/core/src/execution/columnar_to_row.rs
index 66b53af2b..495253c83 100644
--- a/native/core/src/execution/columnar_to_row.rs
+++ b/native/core/src/execution/columnar_to_row.rs
@@ -1068,7 +1068,19 @@ impl ColumnarToRowContext {
                     })?;
                 Ok(Arc::new(decimal_array))
             }
-            _ => Ok(Arc::clone(array)),
+            _ => {
+                // For any other type mismatch, attempt an Arrow cast.
+                // This handles cases like Int32 → Date32 (which can happen when Spark
+                // generates default column values using the physical storage type rather
+                // than the logical type).
+                let options = CastOptions::default();
+                cast_with_options(array, schema_type, &options).map_err(|e| {
+                    CometError::Internal(format!(
+                        "Failed to cast array from {:?} to {:?}: {}",
+                        actual_type, schema_type, e
+                    ))
+                })
+            }
         }
     }
 
diff --git a/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeColumnarToRowExec.scala b/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeColumnarToRowExec.scala
index a520098ed..234538614 100644
--- a/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeColumnarToRowExec.scala
+++ b/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeColumnarToRowExec.scala
@@ -232,11 +232,6 @@ case class CometNativeColumnarToRowExec(child: SparkPlan)
 
 object CometNativeColumnarToRowExec {
 
-  /**
-   * Checks if native columnar to row conversion is enabled.
-   */
-  def isEnabled: Boolean = CometConf.COMET_NATIVE_COLUMNAR_TO_ROW_ENABLED.get()
-
   /**
   * Checks if the given schema is supported by native columnar to row conversion.
    *
diff --git a/spark/src/test/scala/org/apache/comet/CometFuzzTestBase.scala b/spark/src/test/scala/org/apache/comet/CometFuzzTestBase.scala
index 0e179b40f..6b6f02f9f 100644
--- a/spark/src/test/scala/org/apache/comet/CometFuzzTestBase.scala
+++ b/spark/src/test/scala/org/apache/comet/CometFuzzTestBase.scala
@@ -111,14 +111,16 @@ class CometFuzzTestBase extends CometTestBase with AdaptiveSparkPlanHelper {
 
   override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
       pos: Position): Unit = {
-    Seq("native", "jvm").foreach { shuffleMode =>
-      super.test(testName + s" ($shuffleMode shuffle)", testTags: _*) {
-        withSQLConf(
-          CometConf.COMET_PARQUET_UNSIGNED_SMALL_INT_CHECK.key -> "false",
-          CometConf.COMET_SHUFFLE_MODE.key -> shuffleMode) {
-          testFun
+    Seq(("native", "false"), ("jvm", "true"), ("jvm", "false")).foreach {
+      case (shuffleMode, nativeC2R) =>
+        super.test(testName + s" ($shuffleMode shuffle, nativeC2R=$nativeC2R)", testTags: _*) {
+          withSQLConf(
+            CometConf.COMET_NATIVE_COLUMNAR_TO_ROW_ENABLED.key -> nativeC2R,
+            CometConf.COMET_PARQUET_UNSIGNED_SMALL_INT_CHECK.key -> "false",
+            CometConf.COMET_SHUFFLE_MODE.key -> shuffleMode) {
+            testFun
+          }
         }
-      }
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to