[ https://issues.apache.org/jira/browse/SPARK-19938?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15907608#comment-15907608 ]

srinivas thallam commented on SPARK-19938:
------------------------------------------

The tests work fine in the IDE (IntelliJ), but when I run them through sbt I get
the following error. Any help would be appreciated.

[info]   org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost): java.lang.ClassCastException: cannot assign instance of scala.collection.immutable.List$SerializationProxy to field org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$dependencies_ of type scala.collection.Seq in instance of org.apache.spark.rdd.MapPartitionsRDD
[info]  at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2133)
[info]  at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1305)
[info]  at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2237)
[info]  at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2155)
[info]  at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2013)
[info]  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1535)
[info]  at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2231)
[info]  at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2155)
[info]  at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2013)
[info]  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1535)
[info]  at java.io.ObjectInputStream.readObject(ObjectInputStream.java:422)
[info]  at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
[info]  at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
[info]  at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
[info]  at org.apache.spark.scheduler.Task.run(Task.scala:86)
[info]  at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
[info]  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[info]  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[info]  at java.lang.Thread.run(Thread.java:745)
[info]
[info] Driver stacktrace:
[info]   at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1454)
[info]   at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1442)
[info]   at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1441)
[info]   at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
[info]   at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
[info]   at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1441)
[info]   at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
[info]   at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
[info]   at scala.Option.foreach(Option.scala:257)
[info]   at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:811)
[info]   ...
[info]   Cause: java.lang.ClassCastException: cannot assign instance of scala.collection.immutable.List$SerializationProxy to field org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$dependencies_ of type scala.collection.Seq in instance of org.apache.spark.rdd.MapPartitionsRDD
[info]   at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2133)
[info]   at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1305)
[info]   at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2237)
[info]   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2155)
[info]   at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2013)
[info]   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1535)
[info]   at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2231)
[info]   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2155)
[info]   at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2013)
[info]   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1535)
[info]   ...

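For anyone hitting the same failure: it is commonly attributed to sbt running tests inside its own JVM with a layered classloader, so Java deserialization of the RDD lineage on the executor side sees different class instances than a plain JVM run from the IDE would. A frequently suggested workaround is to fork the test JVM in build.sbt. A minimal sketch using sbt 0.13-style settings (not confirmed in this thread, and the memory value is illustrative only):

  // build.sbt (sketch): run tests in a separate, plain JVM so Spark's
  // Java serialization does not go through sbt's layered classloader.
  fork in Test := true

  // Local-mode Spark tests usually should not run suites in parallel
  // inside one JVM (they would share or race on a SparkContext).
  parallelExecution in Test := false

  // Illustrative only: give the forked test JVM some headroom.
  javaOptions in Test += "-Xmx2g"

Forking simply makes the sbt run behave like the IDE run, where tests execute in an ordinary JVM.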
> java.lang.ClassCastException: cannot assign instance of 
> scala.collection.immutable.List$SerializationProxy to field
> -------------------------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-19938
>                 URL: https://issues.apache.org/jira/browse/SPARK-19938
>             Project: Spark
>          Issue Type: Bug
>          Components: Tests
>    Affects Versions: 2.0.2
>            Reporter: srinivas thallam
>



