martin-g commented on code in PR #2815:
URL: https://github.com/apache/datafusion-comet/pull/2815#discussion_r2556984138


##########
spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala:
##########
@@ -3187,4 +3188,30 @@ class CometExpressionSuite extends CometTestBase with 
AdaptiveSparkPlanHelper {
         CometConcat.unsupportedReason)
     }
   }
+
+  // https://github.com/apache/datafusion-comet/issues/2813
+  test("make decimal using DataFrame API") {
+    withTable("t1") {
+      sql("create table t1 using parquet as select 123456 as c1 from range(1)")
+
+      withSQLConf(
+        CometConf.COMET_EXEC_ENABLED.key -> "true",
+        SQLConf.USE_V1_SOURCE_LIST.key -> "parquet",
+        CometConf.COMET_ENABLED.key -> "true",
+        CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true",
+        SQLConf.ANSI_ENABLED.key -> "false",
+        SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false",
+        CometConf.getExprAllowIncompatConfigKey(classOf[Sum]) -> "true",
+        CometConf.COMET_NATIVE_SCAN_IMPL.key -> 
CometConf.SCAN_NATIVE_ICEBERG_COMPAT,

Review Comment:
   Are all of these settings needed for this test? E.g. the `Sum` and `Iceberg 
compat` ones look unrelated.



##########
spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala:
##########
@@ -3187,4 +3188,30 @@ class CometExpressionSuite extends CometTestBase with 
AdaptiveSparkPlanHelper {
         CometConcat.unsupportedReason)
     }
   }
+
+  // https://github.com/apache/datafusion-comet/issues/2813
+  test("make decimal using DataFrame API") {
+    withTable("t1") {
+      sql("create table t1 using parquet as select 123456 as c1 from range(1)")
+
+      withSQLConf(
+        CometConf.COMET_EXEC_ENABLED.key -> "true",
+        SQLConf.USE_V1_SOURCE_LIST.key -> "parquet",
+        CometConf.COMET_ENABLED.key -> "true",
+        CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true",
+        SQLConf.ANSI_ENABLED.key -> "false",
+        SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false",
+        CometConf.getExprAllowIncompatConfigKey(classOf[Sum]) -> "true",
+        CometConf.COMET_NATIVE_SCAN_IMPL.key -> 
CometConf.SCAN_NATIVE_ICEBERG_COMPAT,
+        SQLConf.ADAPTIVE_OPTIMIZER_EXCLUDED_RULES.key -> 
"org.apache.spark.sql.catalyst.optimizer.ConstantFolding") {
+
+        val df = sql("select * from t1")
+        val makeDecimalColumn = createMakeDecimalColumn(df.col("c1").expr, 3, 
0)
+        val df1 = df.withColumn("result", makeDecimalColumn)
+
+        checkSparkAnswerAndFallbackReason(df1, "Unsupported input data type: 
IntegerType")
+      }
+    }
+  }

Review Comment:
   IMO it would be useful to add a positive test too — one with `LongType`.



##########
spark/src/main/scala/org/apache/comet/serde/decimalExpressions.scala:
##########
@@ -38,6 +38,14 @@ object CometUnscaledValue extends 
CometExpressionSerde[UnscaledValue] {
 }
 
 object CometMakeDecimal extends CometExpressionSerde[MakeDecimal] {
+
+  override def getSupportLevel(expr: MakeDecimal): SupportLevel = {
+    expr.child.dataType match {
+      case _: LongType => Compatible()

Review Comment:
   ```suggestion
         case LongType => Compatible()
   ```
   `LongType` is a singleton object, so it can be matched directly instead of 
with a type pattern (`_: LongType`).


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to