This is an automated email from the ASF dual-hosted git repository.
yuanzhou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git
The following commit(s) were added to refs/heads/main by this push:
new 54e7e49ace [GLUTEN-11088] Fix GlutenDateExpressionsSuite in Spark-4.0 (#11172)
54e7e49ace is described below
commit 54e7e49ace86b4b9d458f46543924f242887262c
Author: Mingliang Zhu <[email protected]>
AuthorDate: Tue Nov 25 20:43:19 2025 +0800
[GLUTEN-11088] Fix GlutenDateExpressionsSuite in Spark-4.0 (#11172)
Fix GlutenDateExpressionsSuite in Spark-4.0
apache/spark#44261 makes CurrentTimestamp Unevaluable, so the assertions that call eval() on it are removed and the two ignored tests are re-enabled.
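For context, a minimal sketch (not part of this commit) of why the removed assertions no longer work on Spark 4.0: once CurrentTimestamp is Unevaluable, interpreted eval() throws instead of returning a Long, and the expression is only materialized by the optimizer (e.g. the ComputeCurrentTime rule). The object name is made up for illustration and the exact exception type is an assumption; requires spark-catalyst 4.0 on the classpath.

    import org.apache.spark.sql.catalyst.expressions.{CurrentTimestamp, Literal, UnixTimestamp}

    object CurrentTimestampEvalSketch {
      def main(args: Array[String]): Unit = {
        // Same expression shape as the removed test code below.
        val expr = UnixTimestamp(CurrentTimestamp(), Literal("yyyy-MM-dd HH:mm:ss"))
        try {
          // Before apache/spark#44261 this returned the current epoch seconds.
          val seconds = expr.eval().asInstanceOf[Long]
          println(s"unix_timestamp = $seconds")
        } catch {
          case e: Exception =>
            // On Spark 4.0, CurrentTimestamp is Unevaluable, so interpreted
            // evaluation fails; it is replaced with a literal by the
            // ComputeCurrentTime optimizer rule before execution.
            // (Assumption: the concrete exception type is not pinned down here.)
            println(s"eval() is no longer supported: ${e.getMessage}")
        }
      }
    }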
---
.../expressions/GlutenDateExpressionsSuite.scala | 16 ++--------------
1 file changed, 2 insertions(+), 14 deletions(-)
diff --git a/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenDateExpressionsSuite.scala b/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenDateExpressionsSuite.scala
index 306a377ff5..30198ad3b1 100644
--- a/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenDateExpressionsSuite.scala
+++ b/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenDateExpressionsSuite.scala
@@ -87,8 +87,7 @@ class GlutenDateExpressionsSuite extends DateExpressionsSuite with GlutenTestsTr
"Europe/Brussels")
val outstandingZoneIds: Seq[ZoneId] = outstandingTimezonesIds.map(getZoneId)
- // TODO: fix in Spark-4.0
- ignoreGluten("unix_timestamp") {
+ testGluten("unix_timestamp") {
Seq("legacy", "corrected").foreach {
legacyParserPolicy =>
withDefaultTimeZone(UTC) {
@@ -156,13 +155,6 @@ class GlutenDateExpressionsSuite extends DateExpressionsSuite with GlutenTestsTr
DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-24")),
tz.toZoneId))
)
- val t1 = UnixTimestamp(CurrentTimestamp(), Literal("yyyy-MM-dd HH:mm:ss"))
- .eval()
- .asInstanceOf[Long]
- val t2 = UnixTimestamp(CurrentTimestamp(), Literal("yyyy-MM-dd HH:mm:ss"))
- .eval()
- .asInstanceOf[Long]
- assert(t2 - t1 <= 1)
checkEvaluation(
UnixTimestamp(
Literal.create(null, DateType),
@@ -189,8 +181,7 @@ class GlutenDateExpressionsSuite extends DateExpressionsSuite with GlutenTestsTr
UnixTimestamp(Literal("2015-07-24"), Literal("\""), UTC_OPT) :: Nil)
}
- // TODO: fix in Spark-4.0
- ignoreGluten("to_unix_timestamp") {
+ testGluten("to_unix_timestamp") {
withDefaultTimeZone(UTC) {
for (zid <- outstandingZoneIds) {
Seq("legacy", "corrected").foreach {
@@ -249,9 +240,6 @@ class GlutenDateExpressionsSuite extends DateExpressionsSuite with GlutenTestsTr
MICROSECONDS.toSeconds(DateTimeUtils
.daysToMicros(DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-24")), zid))
)
- val t1 = ToUnixTimestamp(CurrentTimestamp(), Literal(fmt1)).eval().asInstanceOf[Long]
- val t2 = ToUnixTimestamp(CurrentTimestamp(), Literal(fmt1)).eval().asInstanceOf[Long]
- assert(t2 - t1 <= 1)
checkEvaluation(
ToUnixTimestamp(
Literal.create(null, DateType),
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]