This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 6bd0ccf36dd2 [SPARK-47511][SQL][FOLLOWUP] Rename the config REPLACE_NULLIF_USING_WITH_EXPR to be more general
6bd0ccf36dd2 is described below

commit 6bd0ccf36dd23eb1887b73a898021a14baf1f2eb
Author: Wenchen Fan <wenc...@databricks.com>
AuthorDate: Fri Apr 5 07:47:39 2024 -0700

    [SPARK-47511][SQL][FOLLOWUP] Rename the config REPLACE_NULLIF_USING_WITH_EXPR to be more general
    
    ### What changes were proposed in this pull request?
    
    This is a follow-up of
    - #45649
    
    `With` is not only used by `NullIf`, but also by `Between`. This PR renames the config `REPLACE_NULLIF_USING_WITH_EXPR` to be more general and uses it to control `Between` as well.
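
    A minimal sketch, assuming a local SparkSession, of how the renamed conf can be toggled as an escape hatch. The conf is internal and defaults to `false`, so common expressions are factored out with `With` unless it is explicitly enabled:

    ```scala
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().master("local[*]").getOrCreate()

    // Internal conf renamed by this PR: when true, common expressions are always
    // inlined instead of being factored out with the With expression.
    spark.conf.set("spark.sql.alwaysInlineCommonExpr", "true")

    // Both NULLIF and BETWEEN now go through the same conf-controlled rewrite.
    spark.sql("SELECT nullif(id, 5), id BETWEEN 2 AND 7 FROM range(10)").show()
    ```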
    
    ### Why are the changes needed?
    
    To have a single conf that controls all usages of `With`.
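
    For reference, a sketch of the two shapes that conf now selects between, mirroring the `Between` rewrite in this commit (the `Literal` stand-ins for `input`, `lower` and `upper` are illustrative only, assuming the catalyst expression classes are on the classpath):

    ```scala
    import org.apache.spark.sql.catalyst.expressions._

    // Illustrative stand-ins for the Between constructor arguments.
    val input: Expression = Literal(5)
    val lower: Expression = Literal(2)
    val upper: Expression = Literal(7)

    // Default (spark.sql.alwaysInlineCommonExpr = false): `input` is defined once
    // in a With expression and referenced twice through `ref`.
    val withForm: Expression = With(input) { case Seq(ref) =>
      And(GreaterThanOrEqual(ref, lower), LessThanOrEqual(ref, upper))
    }

    // Conf enabled: `input` is inlined and may be evaluated twice.
    val inlined: Expression = And(GreaterThanOrEqual(input, lower), LessThanOrEqual(input, upper))
    ```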
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, the config has not been released yet.
    
    ### How was this patch tested?
    
    existing tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #45871 from cloud-fan/with-conf.
    
    Lead-authored-by: Wenchen Fan <wenc...@databricks.com>
    Co-authored-by: Wenchen Fan <cloud0...@gmail.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../spark/sql/catalyst/expressions/Between.scala      | 19 ++++++++++++-------
 .../sql/catalyst/expressions/nullExpressions.scala    |  2 +-
 .../scala/org/apache/spark/sql/internal/SQLConf.scala | 12 +++++++-----
 3 files changed, 20 insertions(+), 13 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Between.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Between.scala
index e5bb31bc34f1..de1122da646b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Between.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Between.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
+import org.apache.spark.sql.internal.SQLConf
+
 // scalastyle:off line.size.limit
 @ExpressionDescription(
   usage = "Usage: input [NOT] BETWEEN lower AND upper - evaluate if `input` is [not] in between `lower` and `upper`",
@@ -36,13 +38,16 @@ package org.apache.spark.sql.catalyst.expressions
 case class Between private(input: Expression, lower: Expression, upper: Expression, replacement: Expression)
   extends RuntimeReplaceable with InheritAnalysisRules  {
   def this(input: Expression, lower: Expression, upper: Expression) = {
-    this(input, lower, upper, {
-      val commonExpr = CommonExpressionDef(input)
-      val ref = new CommonExpressionRef(commonExpr)
-      val replacement = And(GreaterThanOrEqual(ref, lower), LessThanOrEqual(ref, upper))
-      With(replacement, Seq(commonExpr))
-    })
-  };
+    this(input, lower, upper,
+      if (!SQLConf.get.getConf(SQLConf.ALWAYS_INLINE_COMMON_EXPR)) {
+        With(input) { case Seq(ref) =>
+          And(GreaterThanOrEqual(ref, lower), LessThanOrEqual(ref, upper))
+        }
+      } else {
+        And(GreaterThanOrEqual(input, lower), LessThanOrEqual(input, upper))
+      }
+    )
+  }
 
   override def parameters: Seq[Expression] = Seq(input, lower, upper)
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
index 9b51d6be4c50..010d79f808d1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
@@ -160,7 +160,7 @@ case class NullIf(left: Expression, right: Expression, replacement: Expression)
 
   def this(left: Expression, right: Expression) = {
     this(left, right,
-      if (SQLConf.get.getConf(SQLConf.REPLACE_NULLIF_USING_WITH_EXPR)) {
+      if (!SQLConf.get.getConf(SQLConf.ALWAYS_INLINE_COMMON_EXPR)) {
         With(left) { case Seq(ref) =>
           If(EqualTo(ref, right), Literal.create(null, left.dataType), ref)
         }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 23b0778162d6..71256c6c65fc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -3399,13 +3399,15 @@ object SQLConf {
       .booleanConf
       .createWithDefault(true)
 
-  val REPLACE_NULLIF_USING_WITH_EXPR =
-    buildConf("spark.databricks.sql.replaceNullIfUsingWithExpr")
+  val ALWAYS_INLINE_COMMON_EXPR =
+    buildConf("spark.sql.alwaysInlineCommonExpr")
       .internal()
-      .doc("When true, NullIf expressions are rewritten using With expressions 
to avoid " +
-        "expression duplication.")
+      .doc("When true, always inline common expressions instead of using the 
WITH expression. " +
+        "This may lead to duplicated expressions and the config should only be 
enabled if you " +
+        "hit bugs caused by the WITH expression.")
+      .version("4.0.0")
       .booleanConf
-      .createWithDefault(true)
+      .createWithDefault(false)
 
   val USE_NULLS_FOR_MISSING_DEFAULT_COLUMN_VALUES =
     buildConf("spark.sql.defaultColumn.useNullsForMissingDefaultValues")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
