maropu commented on a change in pull request #29094:
URL: https://github.com/apache/spark/pull/29094#discussion_r458201704



##########
File path: 
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CollapseProjectSuite.scala
##########
@@ -121,6 +121,17 @@ class CollapseProjectSuite extends PlanTest {
     comparePlans(optimized, correctAnswer)
   }
 
+  test("do not collapse project if number of leave expressions would be too 
big") {
+    var query: LogicalPlan = testRelation
+    for (_ <- 1 to 10) {
+      // after n iterations the number of leaf expressions will be 2^{n+1}
+      // => after 10 iterations we would end up with more than 1000 leaf expressions
+      query = query.select(('a + 'b).as('a), ('a - 'b).as('b))

Review comment:
       Actually, the same issue can happen in `ProjectExec`?
https://github.com/apache/spark/blob/4da93b00d7c8b6dd35ae37ece584aac1d7793c33/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala#L127-L135
   
   ```
   scala> Seq((1, 2)).toDF("a", "b").write.saveAsTable("a")
   scala> var query = spark.table("a")
   scala> for (_ <- 1 to 10) {
        |   query = query.select(('a + 'b).as('a), ('a - 'b).as('b))
        | }
   
   scala> query.explain(true)
   == Parsed Logical Plan ==
   ...
   
   == Analyzed Logical Plan ==
   ...
   
   == Optimized Logical Plan ==
   Project [(a#49 + b#50) AS a#53, (a#49 - b#50) AS b#54]
   +- Project [(a#45 + b#46) AS a#49, (a#45 - b#46) AS b#50]
      +- Project [(a#41 + b#42) AS a#45, (a#41 - b#42) AS b#46]
         +- Project [(a#37 + b#38) AS a#41, (a#37 - b#38) AS b#42]
            +- Project [(a#33 + b#34) AS a#37, (a#33 - b#34) AS b#38]
               +- Project [(a#29 + b#30) AS a#33, (a#29 - b#30) AS b#34]
                  +- Project [(a#25 + b#26) AS a#29, (a#25 - b#26) AS b#30]
                     +- Project [(a#21 + b#22) AS a#25, (a#21 - b#22) AS b#26]
                        +- Project [(a#17 + b#18) AS a#21, (a#17 - b#18) AS b#22]
                           +- Project [(a#13 + b#14) AS a#17, (a#13 - b#14) AS b#18]
                              +- Relation[a#13,b#14] parquet
   
   == Physical Plan ==
   *(1) Project [((((((((((a#13 + b#14) AS a#17 + ...
     // too many expressions...
   +- *(1) ColumnarToRow
      +- FileScan parquet default.a[a#13,b#14] Batched: true, DataFilters: [], Format: Parquet, Location: InMemoryFileIndex[file:/Users/maropu/Repositories/spark/spark-master/spark-warehouse/a], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<a:int,b:int>
   ```
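   
   To quantify the blow-up, something like this should work (untested sketch; it assumes Catalyst's `TreeNode.collectLeaves()` and counts leaves across the collapsed physical projection):
   ```
   scala> // after 10 self-referencing selects, this should be on the order of 2^11
   scala> query.queryExecution.sparkPlan.expressions.map(_.collectLeaves().size).sum
   ```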
   

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
##########
@@ -697,16 +697,21 @@ object ColumnPruning extends Rule[LogicalPlan] {
  *    `GlobalLimit(LocalLimit)` pattern is also considered.
  */
 object CollapseProject extends Rule[LogicalPlan] {
+  // If the number of leaf expressions exceeds MAX_LEAF_SIZE, do not collapse,
+  // to prevent driver OOM due to a single large project.
+  private val MAX_LEAF_SIZE = 1000
 
   def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
     case p1 @ Project(_, p2: Project) =>

Review comment:
       Seems like you can simplify it like this?
   ```
     private def hasTooManyExprs(exprs: Seq[Expression]): Boolean = {
       var numExprs = 0
       exprs.foreach { _.foreach { _ => numExprs += 1 } }
       numExprs > SQLConf.get.XXXX
     }
   
     def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
      case p1 @ Project(_, p2: Project) if hasTooManyExprs(p2.projectList) => // skip
         p1
   
       case p1 @ Project(_, p2: Project) =>
   ```
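   
   Or, if walking every expression tree on each match is a concern, a variant that stops counting once it crosses the limit (just a sketch; `XXXX` is still the to-be-named config):
   ```
     private def hasTooManyExprs(exprs: Seq[Expression]): Boolean = {
       val limit = SQLConf.get.XXXX
       var numExprs = 0
       // `TreeNode.foreach` visits every node of one expression; `exists`
       // stops scanning the remaining project-list entries once the
       // running count has already crossed the limit.
       exprs.exists { e =>
         e.foreach(_ => numExprs += 1)
         numExprs > limit
       }
     }
   ```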



