ulysses-you commented on code in PR #36468:
URL: https://github.com/apache/spark/pull/36468#discussion_r868755471


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala:
##########
@@ -52,23 +53,46 @@ object ConstantFolding extends Rule[LogicalPlan] {
     case _ => false
   }
 
-  def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(AlwaysProcess.fn, ruleId) {
-    case q: LogicalPlan => q.transformExpressionsDownWithPruning(
-      AlwaysProcess.fn, ruleId) {
-      // Skip redundant folding of literals. This rule is technically not necessary. Placing this
-      // here avoids running the next rule for Literal values, which would create a new Literal
-      // object and running eval unnecessarily.
-      case l: Literal => l
-
-      case Size(c: CreateArray, _) if c.children.forall(hasNoSideEffect) =>
-        Literal(c.children.length)
-      case Size(c: CreateMap, _) if c.children.forall(hasNoSideEffect) =>
-        Literal(c.children.length / 2)
-
-      // Fold expressions that are foldable.
-      case e if e.foldable => Literal.create(e.eval(EmptyRow), e.dataType)
+  /**
+   * This method folds the child expressions of a conditional expression that is not itself
+   * foldable. Some branches may never be evaluated at runtime, so we should catch any
+   * evaluation exception here and leave the failing expression to be evaluated at runtime.
+   */
+  private def conditionalExpressionFolding(child: Expression): Expression = {
+    if (child.foldable) {
+      try {
+        Literal.create(child.eval(EmptyRow), child.dataType)

Review Comment:
   Makes sense, add a tag `FAILED_TO_EVALUATED`.
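   For illustration, the reviewer's suggestion could be realized with Catalyst's `TreeNodeTag` mechanism. The sketch below is only one possible reading of the comment: the tag name `FAILED_TO_EVALUATED` is taken from the remark above, while the enclosing object and the helper `tryFoldConditionalBranch` are hypothetical names, not the code that was ultimately merged in this PR.

   ```scala
   // Sketch only: tag expressions whose constant folding already failed, so the
   // rule does not retry a known-failing eval on later passes. Names other than
   // the Catalyst APIs (TreeNodeTag, Literal.create, EmptyRow) are hypothetical.
   import scala.util.control.NonFatal

   import org.apache.spark.sql.catalyst.expressions.{EmptyRow, Expression, Literal}
   import org.apache.spark.sql.catalyst.trees.TreeNodeTag

   object ConditionalFoldingSketch {
     // Marker recording that folding of this expression already threw once.
     private val FAILED_TO_EVALUATED = TreeNodeTag[Unit]("FAILED_TO_EVALUATED")

     def tryFoldConditionalBranch(child: Expression): Expression = {
       if (child.foldable && child.getTagValue(FAILED_TO_EVALUATED).isEmpty) {
         try {
           Literal.create(child.eval(EmptyRow), child.dataType)
         } catch {
           case NonFatal(_) =>
             // The branch may never be reached at runtime, so don't fail the query here;
             // tag the expression so the next pass skips the failing evaluation.
             child.setTagValue(FAILED_TO_EVALUATED, ())
             child
         }
       } else {
         child
       }
     }
   }
   ```

   Presumably the point of the tag is idempotence: once an expression is known to throw during folding, subsequent runs of the rule skip it instead of re-evaluating and swallowing the same exception again.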



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

