Yuming Wang created SPARK-51592:
-----------------------------------

             Summary: AQE logging can cause Driver OOM
                 Key: SPARK-51592
                 URL: https://issues.apache.org/jira/browse/SPARK-51592
             Project: Spark
          Issue Type: Bug
          Components: SQL
    Affects Versions: 4.1.0
            Reporter: Yuming Wang


{noformat}
25/03/24 00:30:34 INFO ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: java.lang.OutOfMemoryError: Required array length 2147483639 + 957 is too large
        at java.base/jdk.internal.util.ArraysSupport.hugeLength(ArraysSupport.java:649)
        at java.base/jdk.internal.util.ArraysSupport.newLength(ArraysSupport.java:642)
        at java.base/java.lang.AbstractStringBuilder.newCapacity(AbstractStringBuilder.java:257)
        at java.base/java.lang.AbstractStringBuilder.ensureCapacityInternal(AbstractStringBuilder.java:229)
        at java.base/java.lang.AbstractStringBuilder.append(AbstractStringBuilder.java:582)
        at java.base/java.lang.StringBuilder.append(StringBuilder.java:179)
        at scala.collection.mutable.StringBuilder.append(StringBuilder.scala:203)
        at scala.collection.TraversableOnce$appender$1.apply(TraversableOnce.scala:419)
        at scala.collection.TraversableOnce$appender$1.apply(TraversableOnce.scala:410)
        at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
        at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
        at scala.collection.TraversableOnce.addString(TraversableOnce.scala:424)
        at scala.collection.TraversableOnce.addString$(TraversableOnce.scala:407)
        at scala.collection.AbstractTraversable.addString(Traversable.scala:108)
        at scala.collection.TraversableOnce.mkString(TraversableOnce.scala:377)
        at scala.collection.TraversableOnce.mkString$(TraversableOnce.scala:376)
        at scala.collection.AbstractTraversable.mkString(Traversable.scala:108)
        at scala.collection.TraversableOnce.mkString(TraversableOnce.scala:379)
        at scala.collection.TraversableOnce.mkString$(TraversableOnce.scala:379)
        at scala.collection.AbstractTraversable.mkString(Traversable.scala:108)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$getFinalPhysicalPlan$8(AdaptiveSparkPlanExec.scala:364)
        at org.apache.spark.internal.Logging.logDebug(Logging.scala:64)
        at org.apache.spark.internal.Logging.logDebug$(Logging.scala:63)
        at org.apache.spark.sql.execution.SparkPlan.logDebug(SparkPlan.scala:65)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$logOnLevel$2(AdaptiveSparkPlanExec.scala:82)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$logOnLevel$2$adapted(AdaptiveSparkPlanExec.scala:82)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$getFinalPhysicalPlan$1(AdaptiveSparkPlanExec.scala:363)
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:916)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.getFinalPhysicalPlan(AdaptiveSparkPlanExec.scala:280)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.withFinalPlanUpdate(AdaptiveSparkPlanExec.scala:431)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.executeCollect(AdaptiveSparkPlanExec.scala:398)
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:144)
        at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:153){noformat}
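
The failing frame is the debug logging in AdaptiveSparkPlanExec.getFinalPhysicalPlan (AdaptiveSparkPlanExec.scala:364): the plan-change messages are joined into a single String with mkString before being passed to logDebug. For a sufficiently large plan, the joined message needs a backing array longer than the JVM can allocate, and jdk.internal.util.ArraysSupport.hugeLength throws the OutOfMemoryError above on the driver. Below is a minimal standalone sketch of the failure mode, not Spark code; the sizes are chosen only to cross the JVM array-length limit, and it needs a large heap (e.g. -Xmx4g) to reach that limit rather than failing with ordinary heap exhaustion.

{code:scala}
// Standalone sketch of the failure mode (illustrative, not Spark code).
// mkString materializes the whole joined message as one String. Once the
// underlying StringBuilder needs a backing array longer than the JVM can
// allocate, ArraysSupport.hugeLength throws the same
// "Required array length ... is too large" OutOfMemoryError as above.
object MkStringOom {
  def main(args: Array[String]): Unit = {
    val chunk  = "x" * (1 << 30)          // ~1 GiB of characters
    val chunks = Seq(chunk, chunk, chunk) // ~3 GiB once joined
    val joined = chunks.mkString("\n")    // throws java.lang.OutOfMemoryError here
    println(joined.length)                // never reached
  }
}
{code}

A possible direction for a fix is to cap the size of the message before it is built (the existing spark.sql.maxPlanStringLength limit on plan strings is one precedent), or to log each plan-change message separately instead of concatenating them; the exact approach is open.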


