kazuyukitanimura commented on code in PR #1325:
URL: https://github.com/apache/datafusion-comet/pull/1325#discussion_r1932970709
##########
spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala:
##########
@@ -1929,19 +1929,45 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}
- test("hash functions with decimal input") {
- withTable("t1", "t2") {
-    // Apache Spark: if it's a small decimal, i.e. precision <= 18, turn it into long and hash it.
- // Else, turn it into bytes and hash it.
- sql("create table t1(c1 decimal(18, 2)) using parquet")
- sql("insert into t1 values(1.23), (-1.23), (0.0), (null)")
-    checkSparkAnswerAndOperator("select c1, hash(c1), xxhash64(c1) from t1 order by c1")
-
-    // TODO: comet hash function is not compatible with spark for decimal with precision greater than 18.
- // https://github.com/apache/datafusion-comet/issues/1294
-// sql("create table t2(c1 decimal(20, 2)) using parquet")
-// sql("insert into t2 values(1.23), (-1.23), (0.0), (null)")
-//    checkSparkAnswerAndOperator("select c1, hash(c1), xxhash64(c1) from t2 order by c1")
+ test("hash function with decimal input") {
+ val testPrecisionScales: Seq[(Int, Int)] = Seq(
+ (1, 0),
+ (17, 2),
+ (18, 2),
+ (19, 2),
+ (DecimalType.MAX_PRECISION, DecimalType.MAX_PRECISION - 1))
Review Comment:
nit: perhaps `DecimalType.MAX_SCALE - 1` instead of `DecimalType.MAX_PRECISION - 1` for the scale?
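
For reference, a minimal sketch of the pairs with the suggested constant, assuming Spark's `org.apache.spark.sql.types.DecimalType` (where both `MAX_PRECISION` and `MAX_SCALE` are 38). Covering precisions 18 and 19 matters because, per the comment removed in this diff, Spark hashes decimals with precision <= 18 via their unscaled long and wider decimals via their unscaled bytes:

```scala
import org.apache.spark.sql.types.DecimalType

// Precision/scale pairs covering both hashing paths: precision <= 18
// (hashed as an unscaled long) and precision > 18 (hashed as bytes),
// plus the widest supported decimal.
val testPrecisionScales: Seq[(Int, Int)] = Seq(
  (1, 0),
  (17, 2),
  (18, 2),
  (19, 2),
  (DecimalType.MAX_PRECISION, DecimalType.MAX_SCALE - 1))
```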
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]