[ https://issues.apache.org/jira/browse/SPARK-46632?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Wenchen Fan resolved SPARK-46632. --------------------------------- Fix Version/s: 4.0.0 3.5.3 Resolution: Fixed Issue resolved by pull request 46135 [https://github.com/apache/spark/pull/46135] > EquivalentExpressions throw IllegalStateException > ------------------------------------------------- > > Key: SPARK-46632 > URL: https://issues.apache.org/jira/browse/SPARK-46632 > Project: Spark > Issue Type: Bug > Components: Optimizer, Spark Core, SQL > Affects Versions: 3.3.0, 3.4.0, 3.5.0 > Reporter: zhangzhenhao > Priority: Major > Labels: pull-request-available > Fix For: 4.0.0, 3.5.3 > > > EquivalentExpressions throws IllegalStateException with some IF expressions > ```scala > import org.apache.spark.sql.catalyst.dsl.expressions.DslExpression > import org.apache.spark.sql.catalyst.expressions.{EquivalentExpressions, If, > Literal} > import org.apache.spark.sql.functions.col > val one = Literal(1.0) > val y = col("y").expr > val e1 = If( > Literal(true), > y * one * one + one * one * y, > y * one * one + one * one * y > ) > (new EquivalentExpressions).addExprTree(e1) > ``` > > result is > ``` > java.lang.IllegalStateException: Cannot update expression: (1.0 * 1.0) in > map: Map(ExpressionEquals(('y * 1.0)) -> ExpressionStats(('y * 1.0))) with > use count: -1 > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.updateExprInMap(EquivalentExpressions.scala:85) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.updateExprTree(EquivalentExpressions.scala:198) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.$anonfun$updateExprTree$1(EquivalentExpressions.scala:200) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.$anonfun$updateExprTree$1$adapted(EquivalentExpressions.scala:200) > at scala.collection.Iterator.foreach(Iterator.scala:943) > at scala.collection.Iterator.foreach$(Iterator.scala:943) > at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) > at 
scala.collection.IterableLike.foreach(IterableLike.scala:74) > at scala.collection.IterableLike.foreach$(IterableLike.scala:73) > at scala.collection.AbstractIterable.foreach(Iterable.scala:56) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.updateExprTree(EquivalentExpressions.scala:200) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.$anonfun$updateExprTree$1(EquivalentExpressions.scala:200) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.$anonfun$updateExprTree$1$adapted(EquivalentExpressions.scala:200) > at scala.collection.Iterator.foreach(Iterator.scala:943) > at scala.collection.Iterator.foreach$(Iterator.scala:943) > at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) > at scala.collection.IterableLike.foreach(IterableLike.scala:74) > at scala.collection.IterableLike.foreach$(IterableLike.scala:73) > at scala.collection.AbstractIterable.foreach(Iterable.scala:56) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.updateExprTree(EquivalentExpressions.scala:200) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.updateCommonExprs(EquivalentExpressions.scala:128) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.$anonfun$updateExprTree$3(EquivalentExpressions.scala:201) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.$anonfun$updateExprTree$3$adapted(EquivalentExpressions.scala:201) > at scala.collection.immutable.List.foreach(List.scala:431) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.updateExprTree(EquivalentExpressions.scala:201) > at > org.apache.spark.sql.catalyst.expressions.EquivalentExpressions.addExprTree(EquivalentExpressions.scala:188) > ... 
49 elided > ``` -- This message was sent by Atlassian Jira (v8.20.10#820010) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org For additional commands, e-mail: issues-h...@spark.apache.org