[ https://issues.apache.org/jira/browse/SPARK-17356?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15454553#comment-15454553 ]
Sean Zhong commented on SPARK-17356: ------------------------------------ Reproducer: {code} # Trigger OOM scala> :paste -raw // Entering paste mode (ctrl-D to finish) package org.apache.spark.ml.attribute import org.apache.spark.ml.attribute._ import org.apache.spark.sql.catalyst.expressions.{Alias, Literal} import org.apache.spark.sql.catalyst.plans.logical.LocalRelation import org.apache.spark.sql.catalyst.dsl.plans._ object Test { def main(args: Array[String]): Unit = { val rand = new java.util.Random() val attr: Attribute = new BinaryAttribute(Some("a"), Some(rand.nextInt(100000)), Some(Array("value1", "value2"))) val attributeGroup = new AttributeGroup("group", Array.fill(1000000)(attr)) val alias = Alias(Literal(0), "alias")(explicitMetadata = Some(attributeGroup.toMetadata())) val testRelation = LocalRelation() val query = testRelation.select((0 to 100).toSeq.map(_ => alias): _*) System.out.print(query.toJSON.length) } } // Exiting paste mode, now interpreting. scala> org.apache.spark.ml.attribute.Test.main(null) {code} > Out of memory when calling TreeNode.toJSON > ------------------------------------------ > > Key: SPARK-17356 > URL: https://issues.apache.org/jira/browse/SPARK-17356 > Project: Spark > Issue Type: Bug > Components: SQL > Reporter: Sean Zhong > Attachments: jmap.txt, jstack.txt, queryplan.txt > > > When using MLLib, when calling toJSON on a plan with many level of > sub-queries, it may cause out of memory exception with stack trace like this > {code} > java.lang.OutOfMemoryError: GC overhead limit exceeded > at scala.collection.mutable.AbstractSeq.<init>(Seq.scala:47) > at scala.collection.mutable.AbstractBuffer.<init>(Buffer.scala:48) > at scala.collection.mutable.ListBuffer.<init>(ListBuffer.scala:46) > at scala.collection.immutable.List$.newBuilder(List.scala:396) > at > scala.collection.generic.GenericTraversableTemplate$class.newBuilder(GenericTraversableTemplate.scala:64) > at > 
scala.collection.AbstractTraversable.newBuilder(Traversable.scala:105) > at > scala.collection.TraversableLike$class.filter(TraversableLike.scala:262) > at scala.collection.AbstractTraversable.filter(Traversable.scala:105) > at > scala.collection.TraversableLike$class.filterNot(TraversableLike.scala:274) > at scala.collection.AbstractTraversable.filterNot(Traversable.scala:105) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:25) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:20) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:25) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:25) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:25) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:25) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:20) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:20) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:25) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:20) > at > org.json4s.jackson.JValueSerializer.serialize(JValueSerializer.scala:7) > at > com.fasterxml.jackson.databind.ser.DefaultSerializerProvider.serializeValue(DefaultSerializerProvider.java:128) > at > com.fasterxml.jackson.databind.ObjectMapper._configAndWriteValue(ObjectMapper.java:2881) > at > com.fasterxml.jackson.databind.ObjectMapper.writeValueAsString(ObjectMapper.java:2338) > at org.json4s.jackson.JsonMethods$class.compact(JsonMethods.scala:34) > at org.json4s.jackson.JsonMethods$.compact(JsonMethods.scala:50) > at > org.apache.spark.sql.catalyst.trees.TreeNode.toJSON(TreeNode.scala:566) > {code} > The query plan, stack trace, and jmap distribution is attached. 
-- This message was sent by Atlassian JIRA (v6.3.4#6332) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org For additional commands, e-mail: issues-help@spark.apache.org