[ https://issues.apache.org/jira/browse/SPARK-16003?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Davies Liu updated SPARK-16003:
-------------------------------
    Description: 
This was observed while debugging https://issues.apache.org/jira/browse/SPARK-15811:

{code}
sun.reflect.GeneratedMethodAccessor20.invoke(Unknown Source)
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
java.lang.reflect.Method.invoke(Method.java:497)
java.io.ObjectStreamClass.invokeWriteReplace(ObjectStreamClass.java:1118)
sun.reflect.GeneratedMethodAccessor84.invoke(Unknown Source)
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
java.lang.reflect.Method.invoke(Method.java:497)
org.apache.spark.serializer.SerializationDebugger$ObjectStreamClassMethods$.invokeWriteReplace$extension(SerializationDebugger.scala:347)
org.apache.spark.serializer.SerializationDebugger$.org$apache$spark$serializer$SerializationDebugger$$findObjectAndDescriptor(SerializationDebugger.scala:269)
org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visitSerializable(SerializationDebugger.scala:154)
org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visit(SerializationDebugger.scala:108)
org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visitSerializable(SerializationDebugger.scala:206)
org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visit(SerializationDebugger.scala:108)
org.apache.spark.serializer.SerializationDebugger$.find(SerializationDebugger.scala:67)
org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:41)
org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:46)
org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:295)
org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:288)
org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:108)
org.apache.spark.SparkContext.clean(SparkContext.scala:2038)
org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1.apply(RDD.scala:789)
org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1.apply(RDD.scala:788)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
org.apache.spark.rdd.RDD.withScope(RDD.scala:357)
org.apache.spark.rdd.RDD.mapPartitionsWithIndex(RDD.scala:788)
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:355)
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:240)
org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:323)
org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:39)
org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2163)
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2489)
org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2162)
org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2169)
org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1905)
org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1904)
org.apache.spark.sql.Dataset.withTypedCallback(Dataset.scala:2519)
org.apache.spark.sql.Dataset.head(Dataset.scala:1904)
org.apache.spark.sql.Dataset.take(Dataset.scala:2119)
com.databricks.backend.daemon.driver.OutputAggregator$.withOutputAggregation0(OutputAggregator.scala:80)
com.databricks.backend.daemon.driver.OutputAggregator$.withOutputAggregation(OutputAggregator.scala:42)
com.databricks.backend.daemon.driver.ScalaDriverLocal$$anonfun$repl$2.apply(ScalaDriverLocal.scala:196)
com.databricks.backend.daemon.driver.ScalaDriverLocal$$anonfun$repl$2.apply(ScalaDriverLocal.scala:188)
scala.Option.map(Option.scala:145)
com.databricks.backend.daemon.driver.ScalaDriverLocal.repl(ScalaDriverLocal.scala:188)
com.databricks.backend.daemon.driver.DriverLocal$$anonfun$execute$3.apply(DriverLocal.scala:169)
com.databricks.backend.daemon.driver.DriverLocal$$anonfun$execute$3.apply(DriverLocal.scala:169)
com.databricks.logging.UsageLogging$$anonfun$withAttributionContext$1.apply(UsageLogging.scala:118)
scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
com.databricks.logging.UsageLogging$class.withAttributionContext(UsageLogging.scala:113)
com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:31)
com.databricks.logging.UsageLogging$class.withAttributionTags(UsageLogging.scala:151)
com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:31)
com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:168)
com.databricks.backend.daemon.driver.DriverWrapper$$anonfun$3.apply(DriverWrapper.scala:483)
com.databricks.backend.daemon.driver.DriverWrapper$$anonfun$3.apply(DriverWrapper.scala:483)
scala.util.Try$.apply(Try.scala:161)
com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:480)
com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:381)
com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:212)
java.lang.Thread.run(Thread.java:745)
{code}
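Reading the frames above, the debugger keeps re-entering findObjectAndDescriptor, which invokes writeReplace on each replacement object. If writeReplace returns an object whose class again defines writeReplace, that replacement loop never bottoms out. A minimal sketch of such an object (hypothetical, not taken from SPARK-15811; the loop in the comments is a simplification of what findObjectAndDescriptor does, and Spark reaches these package-private ObjectStreamClass methods via reflection, which is why GeneratedMethodAccessor frames show up in the trace):

{code}
import java.io.Serializable

// Hypothetical class, not from SPARK-15811: writeReplace returns a *fresh*
// instance of the same class, and that class again has a writeReplace method.
class SelfReplacing(val payload: Any) extends Serializable {
  private def writeReplace(): Object = new SelfReplacing(payload)
}

// A replacement loop with no same-class check never terminates on it:
//   var o: Object = new SelfReplacing("x")
//   var desc = ObjectStreamClass.lookupAny(o.getClass)
//   while (desc.hasWriteReplaceMethod) {   // always true for SelfReplacing
//     o = desc.invokeWriteReplace(o)       // yields another SelfReplacing
//     desc = ObjectStreamClass.lookupAny(o.getClass)
//   }
// java.io.ObjectOutputStream breaks out of its equivalent loop as soon as the
// replacement's class equals the original's class; adding the same check to
// the debugger would be one way to fix this.
{code}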

We should fix the infinite loop or disable the SerializationDebugger by default.
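Until it is fixed, there may be a per-job way to turn the debugger off. If I remember the guard in SerializationDebugger correctly (assumption, worth verifying against the actual source), it disables itself whenever the JVM's own extended serialization debug info is switched on:

{code}
import java.security.AccessController
import sun.security.action.GetBooleanAction

// Paraphrased from memory of SerializationDebugger.scala (assumption, please
// verify): improveException() is a pass-through when enableDebugging is
// false, and enableDebugging defaults to the negation of the JVM's
// extendedDebugInfo property. So launching the driver with
//   --driver-java-options "-Dsun.io.serialization.extendedDebugInfo=true"
// should skip the looping code path entirely.
object DebuggerGuard {
  val enableDebugging: Boolean = {
    !AccessController.doPrivileged(
      new GetBooleanAction("sun.io.serialization.extendedDebugInfo")).booleanValue()
  }
}
{code}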

  was:
This is observed while debugging https://issues.apache.org/jira/browse/SPARK-15811

We should fix it or disable it by default.


> SerializationDebugger runs into an infinite loop
> ------------------------------------------------
>
>                 Key: SPARK-16003
>                 URL: https://issues.apache.org/jira/browse/SPARK-16003
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>            Reporter: Davies Liu
>            Priority: Critical
>
> This was observed while debugging https://issues.apache.org/jira/browse/SPARK-15811:
> (stack trace identical to the one in the description above)
> We should fix the infinite loop or disable the SerializationDebugger by default.



