This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new ee1fd926802b [SPARK-46331][SQL] Removing CodegenFallback from subset of DateTime expressions and version() expression
ee1fd926802b is described below

commit ee1fd926802bb75f901ca72ab9b0c144c5eae035
Author: Aleksandar Tomic <aleksandar.to...@databricks.com>
AuthorDate: Tue Jan 9 19:20:24 2024 +0800

    [SPARK-46331][SQL] Removing CodegenFallback from subset of DateTime expressions and version() expression
    
    ### What changes were proposed in this pull request?
    
    This PR moves us a bit closer to removing the CodegenFallback class by relying on RuntimeReplaceable with StaticInvoke instead.
    
    This PR makes the following changes:
    - Reimplements the spark version() expression with RuntimeReplaceable + StaticInvoke (see the sketch after this list).
    - Adds the Unevaluable trait to a subset of DateTime expressions. These expressions must be replaced during analysis anyway, so we now explicitly forbid eval from being called on them.
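    
    For illustration, here is the core of the version() change, as it appears in the
    misc.scala hunk below: instead of falling back to interpreted eval() via
    CodegenFallback, the expression declares a replacement that the analyzer
    substitutes, namely a StaticInvoke of a static Java helper.
    
    ```scala
    // version() after this PR: RuntimeReplaceable + StaticInvoke (from misc.scala below).
    // Needed imports (already present there):
    // org.apache.spark.sql.catalyst.expressions.objects.StaticInvoke and
    // org.apache.spark.sql.types.StringType.
    case class SparkVersion() extends LeafExpression with RuntimeReplaceable {
      override def prettyName: String = "version"
    
      // The analyzer swaps the whole expression for this static call, so the
      // expression itself never needs eval() or a codegen fallback.
      override lazy val replacement: Expression = StaticInvoke(
        classOf[ExpressionImplUtils],  // Java-side implementation holder
        StringType,                    // result type of the call
        "getSparkVersion",             // static method invoked at runtime
        returnNullable = false)
    }
    ```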
    
    ### Why are the changes needed?
    
    The overall direction is to move away from CodegenFallback; this PR moves us closer to that goal.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Running existing tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #44261 from dbatomic/codegenfallback_removal.
    
    Lead-authored-by: Aleksandar Tomic <aleksandar.to...@databricks.com>
    Co-authored-by: Aleksandar Tomic <150942779+dbato...@users.noreply.github.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../catalyst/expressions/ExpressionImplUtils.java  | 13 ++++++
 .../catalyst/analysis/ResolveInlineTables.scala    | 11 +++--
 .../sql/catalyst/analysis/ResolveTableSpec.scala   | 12 +++++-
 .../catalyst/expressions/datetimeExpressions.scala | 25 ++---------
 .../spark/sql/catalyst/expressions/misc.scala      | 15 +++----
 .../spark/sql/catalyst/util/DateTimeUtils.scala    |  7 +--
 .../org/apache/spark/sql/internal/SQLConf.scala    | 14 ++++++
 .../expressions/DateExpressionsSuite.scala         | 50 +++++-----------------
 .../optimizer/ComputeCurrentTimeSuite.scala        | 14 ++++++
 .../catalyst/optimizer/EliminateSortsSuite.scala   |  4 +-
 .../optimizer/FoldablePropagationSuite.scala       | 24 +++++------
 .../sql/TableOptionsConstantFoldingSuite.scala     |  4 +-
 12 files changed, 96 insertions(+), 97 deletions(-)

diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionImplUtils.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionImplUtils.java
index b4fb9eae48da..8fe59cb7fae5 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionImplUtils.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionImplUtils.java
@@ -17,8 +17,10 @@
 
 package org.apache.spark.sql.catalyst.expressions;
 
+import org.apache.spark.SparkBuildInfo;
 import org.apache.spark.sql.errors.QueryExecutionErrors;
 import org.apache.spark.unsafe.types.UTF8String;
+import org.apache.spark.util.VersionUtils;
 
 import javax.crypto.Cipher;
 import javax.crypto.spec.GCMParameterSpec;
@@ -143,6 +145,17 @@ public class ExpressionImplUtils {
     );
   }
 
+  /**
+   * Returns the Spark version.
+   * @return
+   *  Space-separated version and revision.
+   */
+  public static UTF8String getSparkVersion() {
+    String shortVersion = 
VersionUtils.shortVersion(SparkBuildInfo.spark_version());
+    String revision = SparkBuildInfo.spark_revision();
+    return UTF8String.fromString(shortVersion + " " + revision);
+  }
+
   private static SecretKeySpec getSecretKeySpec(byte[] key) {
     return switch (key.length) {
       case 16, 24, 32 -> new SecretKeySpec(key, 0, key.length, "AES");
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
index 811e02b4d97b..3b9c6799bfaf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTables.scala
@@ -68,17 +68,16 @@ object ResolveInlineTables extends Rule[LogicalPlan]
   /**
   * Validates that all inline table data are valid expressions that can be evaluated
    * (in this they must be foldable).
-   *
+   * Note that nondeterministic expressions are not supported since they are not foldable.
+   * The exception is CURRENT_LIKE expressions, which are replaced by a literal in later stages.
    * This is package visible for unit testing.
    */
  private[analysis] def validateInputEvaluable(table: UnresolvedInlineTable): Unit = {
     table.rows.foreach { row =>
       row.foreach { e =>
-        // Note that nondeterministic expressions are not supported since they are not foldable.
-        // Only exception are CURRENT_LIKE expressions, which are replaced by a literal
-        // In later stages.
-        if ((!e.resolved && !e.containsPattern(CURRENT_LIKE))
-          || !trimAliases(prepareForEval(e)).foldable) {
+        if (e.containsPattern(CURRENT_LIKE)) {
+          // Do nothing.
+        } else if (!e.resolved || !trimAliases(prepareForEval(e)).foldable) {
           e.failAnalysis(
            errorClass = "INVALID_INLINE_TABLE.CANNOT_EVALUATE_EXPRESSION_IN_INLINE_TABLE",
             messageParameters = Map("expr" -> toSQLExpr(e)))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableSpec.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableSpec.scala
index d8be6824b909..cc9979ad4c5e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableSpec.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableSpec.scala
@@ -19,10 +19,12 @@ package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.SparkThrowable
 import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
+import org.apache.spark.sql.catalyst.optimizer.ComputeCurrentTime
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.trees.TreePattern.COMMAND
 import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{ArrayType, MapType, StructType}
 
 /**
@@ -34,7 +36,15 @@ import org.apache.spark.sql.types.{ArrayType, MapType, StructType}
  */
 object ResolveTableSpec extends Rule[LogicalPlan] {
   override def apply(plan: LogicalPlan): LogicalPlan = {
-    plan.resolveOperatorsWithPruning(_.containsAnyPattern(COMMAND), ruleId) {
+    val preparedPlan = if (SQLConf.get.legacyEvalCurrentTime && plan.containsPattern(COMMAND)) {
+      AnalysisHelper.allowInvokingTransformsInAnalyzer {
+        ComputeCurrentTime(ResolveTimeZone(plan))
+      }
+    } else {
+      plan
+    }
+
+    preparedPlan.resolveOperatorsWithPruning(_.containsAnyPattern(COMMAND), ruleId) {
       case t: CreateTable =>
         resolveTableSpec(t, t.tableSpec, s => t.copy(tableSpec = s))
       case t: CreateTableAsSelect =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 9be260a9f3da..22d4e175b9a3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -134,22 +134,14 @@ case class CurrentTimeZone() extends LeafExpression with Unevaluable {
   since = "1.5.0")
 // scalastyle:on line.size.limit
 case class CurrentDate(timeZoneId: Option[String] = None)
-  extends LeafExpression with TimeZoneAwareExpression with CodegenFallback {
-
+  extends LeafExpression with TimeZoneAwareExpression with Unevaluable {
   def this() = this(None)
-
-  override def foldable: Boolean = true
   override def nullable: Boolean = false
-
   override def dataType: DataType = DateType
-
   final override def nodePatternsInternal(): Seq[TreePattern] = Seq(CURRENT_LIKE)
-
   override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
     copy(timeZoneId = Option(timeZoneId))
 
-  override def eval(input: InternalRow): Any = currentDate(zoneId)
-
   override def prettyName: String = "current_date"
 }
 
@@ -177,11 +169,9 @@ object CurDateExpressionBuilder extends ExpressionBuilder {
   }
 }
 
-abstract class CurrentTimestampLike() extends LeafExpression with CodegenFallback {
-  override def foldable: Boolean = true
+abstract class CurrentTimestampLike() extends LeafExpression with Unevaluable {
   override def nullable: Boolean = false
   override def dataType: DataType = TimestampType
-  override def eval(input: InternalRow): Any = currentTimestamp()
   final override val nodePatterns: Seq[TreePattern] = Seq(CURRENT_LIKE)
 }
 
@@ -245,22 +235,13 @@ case class Now() extends CurrentTimestampLike {
   group = "datetime_funcs",
   since = "3.4.0")
 case class LocalTimestamp(timeZoneId: Option[String] = None) extends LeafExpression
-  with TimeZoneAwareExpression with CodegenFallback {
-
+  with TimeZoneAwareExpression with Unevaluable {
   def this() = this(None)
-
-  override def foldable: Boolean = true
   override def nullable: Boolean = false
-
   override def dataType: DataType = TimestampNTZType
-
   final override def nodePatternsInternal(): Seq[TreePattern] = Seq(CURRENT_LIKE)
-
   override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
     copy(timeZoneId = Option(timeZoneId))
-
-  override def eval(input: InternalRow): Any = localDateTimeToMicros(LocalDateTime.now(zoneId))
-
   override def prettyName: String = "localtimestamp"
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
index 8816e84490da..c7281e4e8737 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.{SPARK_REVISION, SPARK_VERSION_SHORT}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, FunctionRegistry, UnresolvedSeed}
 import org.apache.spark.sql.catalyst.expressions.codegen._
@@ -288,14 +287,14 @@ case class Uuid(randomSeed: Option[Long] = None) extends LeafExpression with Non
   since = "3.0.0",
   group = "misc_funcs")
 // scalastyle:on line.size.limit
-case class SparkVersion() extends LeafExpression with CodegenFallback {
-  override def nullable: Boolean = false
-  override def foldable: Boolean = true
-  override def dataType: DataType = StringType
+case class SparkVersion() extends LeafExpression with RuntimeReplaceable {
   override def prettyName: String = "version"
-  override def eval(input: InternalRow): Any = {
-    UTF8String.fromString(SPARK_VERSION_SHORT + " " + SPARK_REVISION)
-  }
+
+  override lazy val replacement: Expression = StaticInvoke(
+    classOf[ExpressionImplUtils],
+    StringType,
+    "getSparkVersion",
+    returnNullable = false)
 }
 
 @ExpressionDescription(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index cb93814e90e5..0dd83c4b499e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -516,11 +516,6 @@ object DateTimeUtils extends SparkDateTimeUtils {
     convertTz(micros, getZoneId(timeZone), ZoneOffset.UTC)
   }
 
-  /**
-   * Obtains the current instant as microseconds since the epoch at the UTC time zone.
-   */
-  def currentTimestamp(): Long = instantToMicros(Instant.now())
-
   /**
    * Obtains the current date as days since the epoch in the specified time-zone.
    */
@@ -572,7 +567,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
   def convertSpecialTimestamp(input: String, zoneId: ZoneId): Option[Long] = {
     extractSpecialValue(input.trim).flatMap {
       case "epoch" => Some(0)
-      case "now" => Some(currentTimestamp())
+      case "now" => Some(instantToMicros(Instant.now()))
       case "today" => Some(instantToMicros(today(zoneId).toInstant))
       case "tomorrow" => 
Some(instantToMicros(today(zoneId).plusDays(1).toInstant))
       case "yesterday" => 
Some(instantToMicros(today(zoneId).minusDays(1).toInstant))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index fbceea4e5f8e..d1ac061f02af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -4612,6 +4612,18 @@ object SQLConf {
     .booleanConf
     .createWithDefault(false)
 
+  val LEGACY_EVAL_CURRENT_TIME = buildConf("spark.sql.legacy.earlyEvalCurrentTime")
+    .internal()
+    .doc("When set to true, evaluation and constant folding will happen for 
now() and " +
+      "current_timestamp() expressions before finish analysis phase. " +
+      "This flag will allow a bit more liberal syntax but it will sacrifice 
correctness - " +
+      "Results of now() and current_timestamp() can be different for different 
operations " +
+      "in a single query."
+    )
+    .version("4.0.0")
+    .booleanConf
+    .createWithDefault(false)
+
   /**
    * Holds information about keys that have been deprecated.
    *
@@ -5516,6 +5528,8 @@ class SQLConf extends Serializable with Logging with SqlApiConf {
 
   def legacyJavaCharsets: Boolean = getConf(SQLConf.LEGACY_JAVA_CHARSETS)
 
+  def legacyEvalCurrentTime: Boolean = getConf(SQLConf.LEGACY_EVAL_CURRENT_TIME)
+
   /** ********************** SQLConf functionality methods ************ */
 
   /** Set Spark SQL configuration properties. */
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 09c2b6f5cc9b..88bb05cbf917 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -28,7 +28,7 @@ import scala.language.postfixOps
 import scala.reflect.ClassTag
 import scala.util.Random
 
-import org.apache.spark.{SparkArithmeticException, SparkDateTimeException, SparkFunSuite, SparkUpgradeException}
+import org.apache.spark.{SparkArithmeticException, SparkDateTimeException, SparkException, SparkFunSuite, SparkUpgradeException}
 import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
 import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
 import org.apache.spark.sql.catalyst.util.{DateTimeUtils, IntervalUtils, TimestampFormatter}
@@ -78,33 +78,6 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
-  test("datetime function current_date") {
-    val d0 = DateTimeUtils.currentDate(UTC)
-    val cd = CurrentDate(UTC_OPT).eval(EmptyRow).asInstanceOf[Int]
-    val d1 = DateTimeUtils.currentDate(UTC)
-    assert(d0 <= cd && cd <= d1 && d1 - d0 <= 1)
-
-    val cdjst = CurrentDate(JST_OPT).eval(EmptyRow).asInstanceOf[Int]
-    val cdpst = CurrentDate(PST_OPT).eval(EmptyRow).asInstanceOf[Int]
-    assert(cdpst <= cd && cd <= cdjst)
-  }
-
-  test("datetime function current_timestamp") {
-    val ct = DateTimeUtils.toJavaTimestamp(CurrentTimestamp().eval(EmptyRow).asInstanceOf[Long])
-    val t1 = System.currentTimeMillis()
-    assert(math.abs(t1 - ct.getTime) < 5000)
-  }
-
-  test("datetime function localtimestamp") {
-    // Verify with multiple outstanding time zones which has no daylight saving time.
-    Seq("UTC", "Africa/Dakar", "Asia/Hong_Kong").foreach { zid =>
-      val zoneId = DateTimeUtils.getZoneId(zid)
-      val ct = LocalTimestamp(Some(zid)).eval(EmptyRow).asInstanceOf[Long]
-      val t1 = DateTimeUtils.localDateTimeToMicros(LocalDateTime.now(zoneId))
-      assert(math.abs(t1 - ct) < 1000000)
-    }
-  }
-
   test("DayOfYear") {
     val sdfDay = new SimpleDateFormat("D", Locale.US)
 
@@ -970,11 +943,6 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
              Literal(sdf3.format(Date.valueOf("2015-07-24"))), Literal(fmt3), timeZoneId),
               MICROSECONDS.toSeconds(DateTimeUtils.daysToMicros(
                DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-24")), tz.toZoneId)))
-            val t1 = UnixTimestamp(
-              CurrentTimestamp(), Literal("yyyy-MM-dd HH:mm:ss")).eval().asInstanceOf[Long]
-            val t2 = UnixTimestamp(
-              CurrentTimestamp(), Literal("yyyy-MM-dd HH:mm:ss")).eval().asInstanceOf[Long]
-            assert(t2 - t1 <= 1)
             checkEvaluation(
               UnixTimestamp(
                Literal.create(null, DateType), Literal.create(null, StringType), timeZoneId),
@@ -1041,11 +1009,6 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
              Literal(sdf3.format(Date.valueOf("2015-07-24"))), Literal(fmt3), timeZoneId),
               MICROSECONDS.toSeconds(DateTimeUtils.daysToMicros(
                 DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-24")), zid)))
-            val t1 = ToUnixTimestamp(
-              CurrentTimestamp(), Literal(fmt1)).eval().asInstanceOf[Long]
-            val t2 = ToUnixTimestamp(
-              CurrentTimestamp(), Literal(fmt1)).eval().asInstanceOf[Long]
-            assert(t2 - t1 <= 1)
             checkEvaluation(ToUnixTimestamp(
              Literal.create(null, DateType), Literal.create(null, StringType), timeZoneId), null)
             checkEvaluation(
@@ -1516,7 +1479,6 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
      checkExceptionInExpression[T](ToUnixTimestamp(Literal("1"), Literal(c)), c)
       checkExceptionInExpression[T](UnixTimestamp(Literal("1"), Literal(c)), c)
       if (!Set("E", "F", "q", "Q").contains(c)) {
-        checkExceptionInExpression[T](DateFormatClass(CurrentTimestamp(), Literal(c)), c)
         checkExceptionInExpression[T](FromUnixTime(Literal(0L), Literal(c)), c)
       }
     }
@@ -2124,4 +2086,14 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       }
     }
   }
+
+  test("datetime function CurrentDate and localtimestamp are Unevaluable") {
+    checkError(exception = intercept[SparkException] { CurrentDate(UTC_OPT).eval(EmptyRow) },
+      errorClass = "INTERNAL_ERROR",
+      parameters = Map("message" -> "Cannot evaluate expression: current_date(Some(UTC))"))
+
+    checkError(exception = intercept[SparkException] { LocalTimestamp(UTC_OPT).eval(EmptyRow) },
+      errorClass = "INTERNAL_ERROR",
+      parameters = Map("message" -> "Cannot evaluate expression: localtimestamp(Some(UTC))"))
+  }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala
index 447d77855fb3..6e1c7fc887d4 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst.optimizer
 
+import java.lang.Thread.sleep
 import java.time.{LocalDateTime, ZoneId}
 
 import scala.concurrent.duration._
@@ -51,6 +52,19 @@ class ComputeCurrentTimeSuite extends PlanTest {
     assert(lits(0) == lits(1))
   }
 
+  test("analyzer should respect time flow in current timestamp calls") {
+    val in = Project(Alias(CurrentTimestamp(), "t1")() :: Nil, LocalRelation())
+
+    val planT1 = Optimize.execute(in.analyze).asInstanceOf[Project]
+    sleep(1)
+    val planT2 = Optimize.execute(in.analyze).asInstanceOf[Project]
+
+    val t1 = DateTimeUtils.microsToMillis(literals[Long](planT1)(0))
+    val t2 = DateTimeUtils.microsToMillis(literals[Long](planT2)(0))
+
+    assert(t2 - t1 <= 1000 && t2 - t1 > 0)
+  }
+
   test("analyzer should replace current_date with literals") {
    val in = Project(Seq(Alias(CurrentDate(), "a")(), Alias(CurrentDate(), "b")()), LocalRelation())
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSortsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSortsSuite.scala
index 5cfe4a7bf462..e9e6bd9dcf62 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSortsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSortsSuite.scala
@@ -98,11 +98,11 @@ class EliminateSortsSuite extends AnalysisTest {
   test("Remove no-op alias") {
     val x = testRelation
 
-    val query = x.select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
+    val query = x.select($"a".as("x"), Literal(1).as("y"), $"b")
       .orderBy($"x".asc, $"y".asc, $"b".desc)
     val optimized = Optimize.execute(analyzer.execute(query))
     val correctAnswer = analyzer.execute(
-      x.select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
+      x.select($"a".as("x"), Literal(1).as("y"), $"b")
         .orderBy($"x".asc, $"b".desc))
 
     comparePlans(optimized, correctAnswer)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FoldablePropagationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FoldablePropagationSuite.scala
index 034b5b747fd1..767ef38ea7f7 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FoldablePropagationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FoldablePropagationSuite.scala
@@ -73,25 +73,25 @@ class FoldablePropagationSuite extends PlanTest {
 
   test("Propagate to orderBy clause") {
     val query = testRelation
-      .select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
+      .select($"a".as("x"), "str".as("y"), $"b")
       .orderBy($"x".asc, $"y".asc, $"b".desc)
     val optimized = Optimize.execute(query.analyze)
     val correctAnswer = testRelation
-      .select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
-      .orderBy($"x".asc, SortOrder(Year(CurrentDate()), Ascending), 
$"b".desc).analyze
+      .select($"a".as("x"), "str".as("y"), $"b")
+      .orderBy($"x".asc, SortOrder("str", Ascending), $"b".desc).analyze
 
     comparePlans(optimized, correctAnswer)
   }
 
   test("Propagate to groupBy clause") {
     val query = testRelation
-      .select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
+      .select($"a".as("x"), Literal(42).as("y"), $"b")
       .groupBy($"x", $"y", $"b")(sum($"x"), avg($"y").as("AVG"), count($"b"))
     val optimized = Optimize.execute(query.analyze)
     val correctAnswer = testRelation
-      .select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
-      .groupBy($"x", Year(CurrentDate()).as("y"), $"b")(sum($"x"),
-        avg(Year(CurrentDate())).as("AVG"),
+      .select($"a".as("x"), Literal(42).as("y"), $"b")
+      .groupBy($"x", Literal(42).as("y"), $"b")(sum($"x"),
+        avg(Literal(42)).as("AVG"),
         count($"b")).analyze
 
     comparePlans(optimized, correctAnswer)
@@ -99,16 +99,16 @@ class FoldablePropagationSuite extends PlanTest {
 
   test("Propagate in a complex query") {
     val query = testRelation
-      .select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
+      .select($"a".as("x"), Literal(42).as("y"), $"b")
       .where($"x" > 1 && $"y" === 2016 && $"b" > 1)
       .groupBy($"x", $"y", $"b")(sum($"x"), avg($"y").as("AVG"), count($"b"))
       .orderBy($"x".asc, $"AVG".asc)
     val optimized = Optimize.execute(query.analyze)
     val correctAnswer = testRelation
-      .select($"a".as("x"), Year(CurrentDate()).as("y"), $"b")
-      .where($"x" > 1 && Year(CurrentDate()).as("y") === 2016 && $"b" > 1)
-      .groupBy($"x", Year(CurrentDate()).as("y"), $"b")(sum($"x"),
-        avg(Year(CurrentDate())).as("AVG"),
+      .select($"a".as("x"), Literal(42).as("y"), $"b")
+      .where($"x" > 1 && Literal(42).as("y") === 2016 && $"b" > 1)
+      .groupBy($"x", Literal(42).as("y"), $"b")(sum($"x"),
+        avg(Literal(42)).as("AVG"),
         count($"b"))
       .orderBy($"x".asc, $"AVG".asc).analyze
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TableOptionsConstantFoldingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/TableOptionsConstantFoldingSuite.scala
index b86d5b9b80c4..2e56327a6313 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TableOptionsConstantFoldingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TableOptionsConstantFoldingSuite.scala
@@ -44,7 +44,9 @@ class TableOptionsConstantFoldingSuite extends QueryTest with SharedSparkSession
     checkOption("null", null)
     checkOption("cast('11 23:4:0' as interval day to second)",
       "INTERVAL '11 23:04:00' DAY TO SECOND")
-    checkOption("date_diff(current_date(), current_date())", "0")
+    withSQLConf(SQLConf.LEGACY_EVAL_CURRENT_TIME.key -> "true") {
+      checkOption("date_diff(current_date(), current_date())", "0")
+    }
     checkOption("date_sub(date'2022-02-02', 1)", "2022-02-01")
     checkOption("timestampadd(microsecond, 5, timestamp'2022-02-28 00:00:00')",
       "2022-02-28 00:00:00.000005")

