Hey guys,
I have CDH 5.3.3 with Spark 1.2.0 (on YARN).
This does not work:

/opt/cloudera/parcels/CDH/lib/spark/bin/spark-sql \
  --deploy-mode client --master yarn --driver-memory 1g \
  -e "select j.person_id, p.first_name, p.last_name, count(*) from (select person_id from cdr.cdr_mjp_joborder where person_id is not null) j join (select person_id, first_name, last_name from cdr.cdr_mjp_people where lower(last_name) like '%subramanian%') p on j.person_id = p.person_id GROUP BY j.person_id, p.first_name, p.last_name"
This works, but only one executor is used:

/opt/cloudera/parcels/CDH/lib/spark/bin/spark-sql \
  --driver-memory 1g \
  -e "select j.person_id, p.first_name, p.last_name, count(*) from (select person_id from cdr.cdr_mjp_joborder where person_id is not null) j join (select person_id, first_name, last_name from cdr.cdr_mjp_people where lower(last_name) like '%subramanian%') p on j.person_id = p.person_id GROUP BY j.person_id, p.first_name, p.last_name"
Any thoughts?

I found a related link, but I don't understand the language (it appears to be Chinese): http://blog.csdn.net/freedomboy319/article/details/46332009

thanks
sanjay


ERRORS:

Error: JAVA_HOME is not set and could not be found.

15/06/16 18:17:19 WARN Holder: java.lang.ClassNotFoundException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter at 
java.net.URLClassLoader$1.run(URLClassLoader.java:202) at 
java.security.AccessController.doPrivileged(Native Method) at 
java.net.URLClassLoader.findClass(URLClassLoader.java:190) at 
java.lang.ClassLoader.loadClass(ClassLoader.java:306) at 
java.lang.ClassLoader.loadClass(ClassLoader.java:247) at 
org.eclipse.jetty.util.Loader.loadClass(Loader.java:100) at 
org.eclipse.jetty.util.Loader.loadClass(Loader.java:79) at 
org.eclipse.jetty.servlet.Holder.doStart(Holder.java:107) at 
org.eclipse.jetty.servlet.FilterHolder.doStart(FilterHolder.java:90) at 
org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
 at 
org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:768) at 
org.eclipse.jetty.servlet.ServletHandler.updateMappings(ServletHandler.java:1357)
 at 
org.eclipse.jetty.servlet.ServletHandler.setFilterMappings(ServletHandler.java:1393)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterMapping(ServletHandler.java:1113)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterWithMapping(ServletHandler.java:979)
 at 
org.eclipse.jetty.servlet.ServletContextHandler.addFilter(ServletContextHandler.java:332)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) 
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:141)
 at 
scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
 at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108) at 
org.apache.spark.ui.JettyUtils$.addFilters(JettyUtils.scala:141) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at scala.Option.foreach(Option.scala:236) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend.org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnSchedulerActor$$anonfun$receive$1.applyOrElse(YarnSchedulerBackend.scala:129)
 at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498) at 
akka.actor.ActorCell.invoke(ActorCell.scala:456) at 
akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237) at 
akka.dispatch.Mailbox.run(Mailbox.scala:219) at 
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
 at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at 
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
 at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at 
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)

15/06/16 18:17:19 WARN AbstractLifeCycle: FAILED org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter-1c7ab89d: javax.servlet.UnavailableException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
javax.servlet.UnavailableException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter at 
org.eclipse.jetty.servlet.Holder.doStart(Holder.java:114) at 
org.eclipse.jetty.servlet.FilterHolder.doStart(FilterHolder.java:90) at 
org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
 at 
org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:768) at 
org.eclipse.jetty.servlet.ServletHandler.updateMappings(ServletHandler.java:1357)
 at 
org.eclipse.jetty.servlet.ServletHandler.setFilterMappings(ServletHandler.java:1393)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterMapping(ServletHandler.java:1113)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterWithMapping(ServletHandler.java:979)
 at 
org.eclipse.jetty.servlet.ServletContextHandler.addFilter(ServletContextHandler.java:332)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) 
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:141)
 at 
scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
 at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108) at 
org.apache.spark.ui.JettyUtils$.addFilters(JettyUtils.scala:141) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at scala.Option.foreach(Option.scala:236) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend.org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnSchedulerActor$$anonfun$receive$1.applyOrElse(YarnSchedulerBackend.scala:129)
 at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498) at 
akka.actor.ActorCell.invoke(ActorCell.scala:456) at 
akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237) at 
akka.dispatch.Mailbox.run(Mailbox.scala:219) at 
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
 at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at 
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
 at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at 
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)

15/06/16 18:17:19 ERROR OneForOneStrategy: javax.servlet.UnavailableException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
java.lang.RuntimeException: javax.servlet.UnavailableException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter at 
org.eclipse.jetty.servlet.ServletHandler.updateMappings(ServletHandler.java:1361)
 at 
org.eclipse.jetty.servlet.ServletHandler.setFilterMappings(ServletHandler.java:1393)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterMapping(ServletHandler.java:1113)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterWithMapping(ServletHandler.java:979)
 at 
org.eclipse.jetty.servlet.ServletContextHandler.addFilter(ServletContextHandler.java:332)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) 
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:141)
 at 
scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
 at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108) at 
org.apache.spark.ui.JettyUtils$.addFilters(JettyUtils.scala:141) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at scala.Option.foreach(Option.scala:236) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend.org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnSchedulerActor$$anonfun$receive$1.applyOrElse(YarnSchedulerBackend.scala:129)
 at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498) at 
akka.actor.ActorCell.invoke(ActorCell.scala:456) at 
akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237) at 
akka.dispatch.Mailbox.run(Mailbox.scala:219) at 
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
 at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at 
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
 at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at 
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: javax.servlet.UnavailableException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter at 
org.eclipse.jetty.servlet.Holder.doStart(Holder.java:114) at 
org.eclipse.jetty.servlet.FilterHolder.doStart(FilterHolder.java:90) at 
org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
 at 
org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:768) at 
org.eclipse.jetty.servlet.ServletHandler.updateMappings(ServletHandler.java:1357)
 ... 27 more

SET spark.sql.hive.version=0.13.1
[Stage 0:>                                                         (0 + 0) / 60]
[Stage 0:>                                                         (0 + 2) / 60]
[Stage 0:>                 (0 + 2) / 60][Stage 1:>                 (0 + 0) / 99]

15/06/16 18:17:31 ERROR YarnClientClusterScheduler: Lost executor 2 on ip-10-231-19-195.us-west-2.compute.internal: remote Akka client disassociated
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 2
15/06/16 18:17:31 ERROR YarnClientClusterScheduler: Lost executor 1 on ip-10-231-19-197.us-west-2.compute.internal: remote Akka client disassociated
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 1
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 2
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 1
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 2
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 1
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 2
15/06/16 18:17:31 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 1
[Stage 0:>                 (0 + 0) / 60][Stage 1:>                 (0 + 0) / 99]
[Stage 0:>                 (0 + 1) / 60][Stage 1:>                 (0 + 0) / 99]
[Stage 0:>                 (0 + 2) / 60][Stage 1:>                 (0 + 0) / 99]

15/06/16 18:17:48 ERROR YarnClientClusterScheduler: Lost executor 4 on ip-10-231-19-197.us-west-2.compute.internal: remote Akka client disassociated
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 4
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 4
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 4
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 4

[Stage 0:>                 (0 + 1) / 60][Stage 1:>                 (0 + 0) / 99]

15/06/16 18:17:48 ERROR YarnClientClusterScheduler: Lost executor 3 on ip-10-231-19-23.us-west-2.compute.internal: remote Akka client disassociated
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 3
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 3
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 3
15/06/16 18:17:48 ERROR YarnClientSchedulerBackend: Asked to remove non-existent executor 3
[Stage 0:>                 (0 + 0) / 60][Stage 1:>                 (0 + 0) / 99]

15/06/16 18:18:09 ERROR OneForOneStrategy: No filter named org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter-1c7ab89d
java.lang.IllegalStateException: No filter named org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter-1c7ab89d at 
org.eclipse.jetty.servlet.ServletHandler.updateMappings(ServletHandler.java:1291)
 at 
org.eclipse.jetty.servlet.ServletHandler.setFilterMappings(ServletHandler.java:1393)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterMapping(ServletHandler.java:1132)
 at 
org.eclipse.jetty.servlet.ServletHandler.addFilterWithMapping(ServletHandler.java:979)
 at 
org.eclipse.jetty.servlet.ServletContextHandler.addFilter(ServletContextHandler.java:332)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1$$anonfun$apply$6.apply(JettyUtils.scala:163)
 at 
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) 
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:163)
 at 
org.apache.spark.ui.JettyUtils$$anonfun$addFilters$1.apply(JettyUtils.scala:141)
 at 
scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
 at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108) at 
org.apache.spark.ui.JettyUtils$.addFilters(JettyUtils.scala:141) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$$anonfun$org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter$3.apply(YarnSchedulerBackend.scala:90)
 at scala.Option.foreach(Option.scala:236) at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend.org$apache$spark$scheduler$cluster$YarnSchedulerBackend$$addWebUIFilter(YarnSchedulerBackend.scala:90)
 at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnSchedulerActor$$anonfun$receive$1.applyOrElse(YarnSchedulerBackend.scala:129)
 at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498) at 
akka.actor.ActorCell.invoke(ActorCell.scala:456) at 
akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237) at 
akka.dispatch.Mailbox.run(Mailbox.scala:219) at 
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
 at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at 
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
 at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at 
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
[Stage 0:>                 (0 + 2) / 60][Stage 1:>                 (0 + 0) / 99]

15/06/16 18:18:18 ERROR TaskSetManager: Task 2 in stage 0.0 failed 4 times; aborting job
15/06/16 18:18:18 ERROR SparkSQLDriver: Failed in [select j.person_id, p.first_name, p.last_name, count(*) from (select person_id from cdr.cdr_mjp_joborder where person_id is not null) j join (select person_id, first_name, last_name from cdr.cdr_mjp_people where lower(last_name) like '%subramanian%') p on j.person_id = p.person_id GROUP BY j.person_id, p.first_name, p.last_name]
org.apache.spark.SparkException: Job aborted due to stage failure: Task 2 in stage 0.0 failed 4 times, most recent failure: Lost task 2.3 in stage 0.0 (TID 9, ip-10-231-19-197.us-west-2.compute.internal): java.lang.NoClassDefFoundError: Lorg/apache/hadoop/hive/ql/plan/TableDesc; at 
java.lang.Class.getDeclaredFields0(Native Method) at 
java.lang.Class.privateGetDeclaredFields(Class.java:2436) at 
java.lang.Class.getDeclaredField(Class.java:1946) at 
java.io.ObjectStreamClass.getDeclaredSUID(ObjectStreamClass.java:1659) at 
java.io.ObjectStreamClass.access$700(ObjectStreamClass.java:72) at 
java.io.ObjectStreamClass$2.run(ObjectStreamClass.java:480) at 
java.io.ObjectStreamClass$2.run(ObjectStreamClass.java:468) at 
java.security.AccessController.doPrivileged(Native Method) at 
java.io.ObjectStreamClass.<init>(ObjectStreamClass.java:468) at 
java.io.ObjectStreamClass.lookup(ObjectStreamClass.java:365) at 
java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:602) at 
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1622) at 
java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.readObject(ObjectInputStream.java:370) at 
scala.collection.immutable.$colon$colon.readObject(List.scala:362) at 
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 at java.lang.reflect.Method.invoke(Method.java:606) at 
java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.readObject(ObjectInputStream.java:370) at 
scala.collection.immutable.$colon$colon.readObject(List.scala:362) at 
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 at java.lang.reflect.Method.invoke(Method.java:606) at 
java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350) at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990) at 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915) at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798) at 
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
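
The task failure at the end (java.lang.NoClassDefFoundError: Lorg/apache/hadoop/hive/ql/plan/TableDesc;) makes me think the executors launched through YARN cannot see the Hive jars. Purely as a guess, this is what I intend to try next -- pointing the executor classpath at the parcel's Hive libs (the path and whether the wildcard is honoured are unverified assumptions on my part):

/opt/cloudera/parcels/CDH/lib/spark/bin/spark-sql \
  --master yarn --deploy-mode client --driver-memory 1g \
  --conf spark.executor.extraClassPath="/opt/cloudera/parcels/CDH/lib/hive/lib/*" \
  -e "<same query as above>"

I am not sure whether that is related to the AmIpFilter ClassNotFoundException earlier in the log or a separate issue.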