[ https://issues.apache.org/jira/browse/SPARK-21928?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17788104#comment-17788104 ]

Shivam Sharma commented on SPARK-21928:
---------------------------------------

I am getting this intermittent failure on Spark 2.4.3. Here is the full
stack trace:
{code:java}
Exception in thread "main" java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.worker.DriverWrapper$.main(DriverWrapper.scala:65)
    at org.apache.spark.deploy.worker.DriverWrapper.main(DriverWrapper.scala)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 75 in stage 1.0 failed 4 times, most recent failure: Lost task 75.3 in stage 1.0 (TID 171, phx6-kwq.prod.xyz.internal, executor 71): java.io.IOException: org.apache.spark.SparkException: Failed to register classes with Kryo
    at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1333)
    at org.apache.spark.broadcast.TorrentBroadcast.readBroadcastBlock(TorrentBroadcast.scala:208)
    at org.apache.spark.broadcast.TorrentBroadcast._value$lzycompute(TorrentBroadcast.scala:66)
    at org.apache.spark.broadcast.TorrentBroadcast._value(TorrentBroadcast.scala:66)
    at org.apache.spark.broadcast.TorrentBroadcast.getValue(TorrentBroadcast.scala:96)
    at org.apache.spark.broadcast.Broadcast.value(Broadcast.scala:70)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:89)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:411)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Failed to register classes with Kryo
    at org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:140)
    at org.apache.spark.serializer.KryoSerializerInstance.borrowKryo(KryoSerializer.scala:324)
    at org.apache.spark.serializer.KryoSerializerInstance.<init>(KryoSerializer.scala:309)
    at org.apache.spark.serializer.KryoSerializer.newInstance(KryoSerializer.scala:218)
    at org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:305)
    at org.apache.spark.broadcast.TorrentBroadcast.$anonfun$readBroadcastBlock$3(TorrentBroadcast.scala:235)
    at scala.Option.getOrElse(Option.scala:138)
    at org.apache.spark.broadcast.TorrentBroadcast.$anonfun$readBroadcastBlock$1(TorrentBroadcast.scala:211)
    at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1326)
    ... 14 more
Caused by: java.lang.ClassNotFoundException: com.xyz.datashack.SparkKryoRegistrar
    at java.lang.ClassLoader.findClass(ClassLoader.java:530)
    at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.java:35)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.java:40)
    at org.apache.spark.util.ChildFirstURLClassLoader.loadClass(ChildFirstURLClassLoader.java:48)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at org.apache.spark.serializer.KryoSerializer.$anonfun$newKryo$6(KryoSerializer.scala:135)
    at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
    at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
    at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
    at scala.collection.TraversableLike.map(TraversableLike.scala:237)
    at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
    at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:198)
    at org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:135)
    ... 22 more
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:1889)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:1877)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:1876)
    at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
    at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1876)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:926)
    at scala.Option.foreach(Option.scala:274)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2110)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2059)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2048)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2101)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126)
    at org.apache.spark.rdd.RDD.$anonfun$foreachPartition$1(RDD.scala:935)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.foreachPartition(RDD.scala:933)
    at org.apache.spark.api.java.JavaRDDLike.foreachPartition(JavaRDDLike.scala:219)
    at org.apache.spark.api.java.JavaRDDLike.foreachPartition$(JavaRDDLike.scala:218)
    at org.apache.spark.api.java.AbstractJavaRDDLike.foreachPartition(JavaRDDLike.scala:45)
    at com.xyz.datashack.tcdn.Backfill.buildIndex(Backfill.java:211)
    at com.xyz.datashack.tcdn.Backfill.runWithContext(Backfill.java:122)
    at com.xyz.datashack.tcdn.Backfill.lambda$main$0(Backfill.java:94)
    at com.xyz.datashack.tcdn.TCDClient.runWithReporting(TCDClient.java:54)
    at com.xyz.datashack.tcdn.Backfill.main(Backfill.java:94)
    ... 6 more
Caused by: java.io.IOException: org.apache.spark.SparkException: Failed to register classes with Kryo
    at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1333)
    at org.apache.spark.broadcast.TorrentBroadcast.readBroadcastBlock(TorrentBroadcast.scala:208)
    at org.apache.spark.broadcast.TorrentBroadcast._value$lzycompute(TorrentBroadcast.scala:66)
    at org.apache.spark.broadcast.TorrentBroadcast._value(TorrentBroadcast.scala:66)
    at org.apache.spark.broadcast.TorrentBroadcast.getValue(TorrentBroadcast.scala:96)
    at org.apache.spark.broadcast.Broadcast.value(Broadcast.scala:70)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:89)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:411)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Failed to register classes with Kryo
    at org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:140)
    at org.apache.spark.serializer.KryoSerializerInstance.borrowKryo(KryoSerializer.scala:324)
    at org.apache.spark.serializer.KryoSerializerInstance.<init>(KryoSerializer.scala:309)
    at org.apache.spark.serializer.KryoSerializer.newInstance(KryoSerializer.scala:218)
    at org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:305)
    at org.apache.spark.broadcast.TorrentBroadcast.$anonfun$readBroadcastBlock$3(TorrentBroadcast.scala:235)
    at scala.Option.getOrElse(Option.scala:138)
    at org.apache.spark.broadcast.TorrentBroadcast.$anonfun$readBroadcastBlock$1(TorrentBroadcast.scala:211)
    at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1326)
    ... 14 more
Caused by: java.lang.ClassNotFoundException: com.xyz.datashack.SparkKryoRegistrar
    at java.lang.ClassLoader.findClass(ClassLoader.java:530)
    at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.java:35)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.java:40)
    at org.apache.spark.util.ChildFirstURLClassLoader.loadClass(ChildFirstURLClassLoader.java:48)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at org.apache.spark.serializer.KryoSerializer.$anonfun$newKryo$6(KryoSerializer.scala:135)
    at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
    at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
    at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
    at scala.collection.TraversableLike.map(TraversableLike.scala:237)
    at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
    at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:198)
    at org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:135)
    ... 22 more
{code}

> ClassNotFoundException for custom Kryo registrator class during serde in 
> netty threads
> --------------------------------------------------------------------------------------
>
>                 Key: SPARK-21928
>                 URL: https://issues.apache.org/jira/browse/SPARK-21928
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 2.1.1, 2.2.0
>            Reporter: John Brock
>            Assignee: Imran Rashid
>            Priority: Major
>             Fix For: 2.1.2, 2.2.1, 2.3.0
>
>
> From SPARK-13990 & SPARK-13926, Spark's SerializerManager has its own 
> instance of a KryoSerializer which does not have the defaultClassLoader set 
> on it. For normal task execution, that doesn't cause problems, because the 
> serializer falls back to the current thread's context classloader, which is 
> set correctly for task threads anyway.
> However, Netty maintains its own thread pool, and those threads don't change 
> their classloader to include the extra user JARs needed for the custom Kryo 
> registrator. That only matters when blocks are sent across the network in a 
> way that forces serde in the Netty thread. That won't happen often, because 
> (a) Spark tries to execute tasks where the RDDs are already cached and (b) 
> broadcast blocks generally don't require any serde in the Netty threads 
> (that occurs in the task thread that is reading the broadcast value). 
> However, it can come up with remote cache reads, or if fetching a broadcast 
> block forces another block to disk, which requires serialization.
> This doesn't affect the shuffle path, because the serde is never done in the 
> threads created by Netty.
> I think a fix for this should be fairly straightforward: we just need to set 
> the classloader on that extra Kryo instance.
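> Concretely, the registrator lookup in {{KryoSerializer.newKryo}} looks 
> roughly like this (a simplified sketch, not the exact Spark source):
> {code:java}
> // Simplified sketch of the lookup in KryoSerializer.newKryo: if no
> // defaultClassLoader was set, fall back to the calling thread's context
> // classloader. Task threads have a user-JAR-aware context classloader;
> // Netty's threads do not, so Class.forName fails there.
> val classLoader = defaultClassLoader.getOrElse(Thread.currentThread.getContextClassLoader)
> val registrator = Class.forName(registratorClassName, true, classLoader)
>   .newInstance().asInstanceOf[KryoRegistrator]
> registrator.registerClasses(kryo)
> {code}
> So the fix amounts to calling {{setDefaultClassLoader}} (available on every 
> Spark {{Serializer}}) on the SerializerManager's private Kryo instance, 
> pointing it at a classloader that includes the user JARs.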
>  (original problem description below)
> I unfortunately can't reliably reproduce this bug; it happens only 
> occasionally, when training a logistic regression model with very large 
> datasets. The training will often proceed through several {{treeAggregate}} 
> calls without any problems, and then suddenly workers will start running into 
> this {{java.lang.ClassNotFoundException}}.
> After doing some debugging, it seems that whenever this error happens, Spark 
> is trying to use the {{sun.misc.Launcher$AppClassLoader}} {{ClassLoader}} 
> instance instead of the usual 
> {{org.apache.spark.util.MutableURLClassLoader}}. {{MutableURLClassLoader}} 
> can see my custom Kryo registrator, but the {{AppClassLoader}} instance can't.
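> One way to see which loader is in play is to log it from application code 
> (an illustrative snippet, not from the original debugging session; 
> {{MyKryoRegistrator}} stands in for your registrator class):
> {code:java}
> // Compare the loader the failing Class.forName would consult with the
> // loader that actually defines the registrator class.
> println(s"context loader:     ${Thread.currentThread.getContextClassLoader}")
> println(s"registrator loader: ${classOf[com.foo.bar.MyKryoRegistrator].getClassLoader}")
> {code}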
> When this error does pop up, it's usually accompanied by the task seeming to 
> hang, and I need to kill Spark manually.
> I'm running a Spark application in cluster mode via spark-submit, and I have 
> a custom Kryo registrator. The JAR is built with {{sbt assembly}}.
> Exception message:
> {noformat}
> 17/08/29 22:39:04 ERROR TransportRequestHandler: Error opening block 
> StreamChunkId{streamId=542074019336, chunkIndex=0} for request from 
> /10.0.29.65:34332
> org.apache.spark.SparkException: Failed to register classes with Kryo
>     at 
> org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:139)
>     at 
> org.apache.spark.serializer.KryoSerializerInstance.borrowKryo(KryoSerializer.scala:292)
>     at 
> org.apache.spark.serializer.KryoSerializerInstance.<init>(KryoSerializer.scala:277)
>     at 
> org.apache.spark.serializer.KryoSerializer.newInstance(KryoSerializer.scala:186)
>     at 
> org.apache.spark.serializer.SerializerManager.dataSerializeStream(SerializerManager.scala:169)
>     at 
> org.apache.spark.storage.BlockManager$$anonfun$dropFromMemory$3.apply(BlockManager.scala:1382)
>     at 
> org.apache.spark.storage.BlockManager$$anonfun$dropFromMemory$3.apply(BlockManager.scala:1377)
>     at org.apache.spark.storage.DiskStore.put(DiskStore.scala:69)
>     at 
> org.apache.spark.storage.BlockManager.dropFromMemory(BlockManager.scala:1377)
>     at 
> org.apache.spark.storage.memory.MemoryStore.org$apache$spark$storage$memory$MemoryStore$$dropBlock$1(MemoryStore.scala:524)
>     at 
> org.apache.spark.storage.memory.MemoryStore$$anonfun$evictBlocksToFreeSpace$2.apply(MemoryStore.scala:545)
>     at 
> org.apache.spark.storage.memory.MemoryStore$$anonfun$evictBlocksToFreeSpace$2.apply(MemoryStore.scala:539)
>     at 
> scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
>     at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
>     at 
> org.apache.spark.storage.memory.MemoryStore.evictBlocksToFreeSpace(MemoryStore.scala:539)
>     at 
> org.apache.spark.memory.StorageMemoryPool.acquireMemory(StorageMemoryPool.scala:92)
>     at 
> org.apache.spark.memory.StorageMemoryPool.acquireMemory(StorageMemoryPool.scala:73)
>     at 
> org.apache.spark.memory.StaticMemoryManager.acquireStorageMemory(StaticMemoryManager.scala:72)
>     at 
> org.apache.spark.storage.memory.MemoryStore.putBytes(MemoryStore.scala:147)
>     at 
> org.apache.spark.storage.BlockManager.maybeCacheDiskBytesInMemory(BlockManager.scala:1143)
>     at 
> org.apache.spark.storage.BlockManager.org$apache$spark$storage$BlockManager$$doGetLocalBytes(BlockManager.scala:594)
>     at 
> org.apache.spark.storage.BlockManager$$anonfun$getLocalBytes$2.apply(BlockManager.scala:559)
>     at 
> org.apache.spark.storage.BlockManager$$anonfun$getLocalBytes$2.apply(BlockManager.scala:559)
>     at scala.Option.map(Option.scala:146)
>     at 
> org.apache.spark.storage.BlockManager.getLocalBytes(BlockManager.scala:559)
>     at 
> org.apache.spark.storage.BlockManager.getBlockData(BlockManager.scala:353)
>     at 
> org.apache.spark.network.netty.NettyBlockRpcServer$$anonfun$1.apply(NettyBlockRpcServer.scala:61)
>     at 
> org.apache.spark.network.netty.NettyBlockRpcServer$$anonfun$1.apply(NettyBlockRpcServer.scala:60)
>     at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
>     at 
> scala.collection.convert.Wrappers$IteratorWrapper.next(Wrappers.scala:31)
>     at 
> org.apache.spark.network.server.OneForOneStreamManager.getChunk(OneForOneStreamManager.java:89)
>     at 
> org.apache.spark.network.server.TransportRequestHandler.processFetchRequest(TransportRequestHandler.java:125)
>     at 
> org.apache.spark.network.server.TransportRequestHandler.handle(TransportRequestHandler.java:103)
>     at 
> org.apache.spark.network.server.TransportChannelHandler.channelRead(TransportChannelHandler.java:118)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
>     at 
> io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
>     at 
> io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
>     at 
> org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
>     at 
> io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
>     at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
>     at 
> io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
>     at 
> io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
>     at 
> io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:643)
>     at 
> io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:566)
>     at 
> io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:480)
>     at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:442)
>     at 
> io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131)
>     at 
> io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
>     at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ClassNotFoundException: com.foo.bar.MyKryoRegistrator
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>     at java.lang.Class.forName0(Native Method)
>     at java.lang.Class.forName(Class.java:348)
>     at 
> org.apache.spark.serializer.KryoSerializer$$anonfun$newKryo$5.apply(KryoSerializer.scala:134)
>     at 
> org.apache.spark.serializer.KryoSerializer$$anonfun$newKryo$5.apply(KryoSerializer.scala:134)
>     at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>     at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>     at 
> scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
>     at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
>     at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>     at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
>     at 
> org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:134)
>     ... 60 more
> {noformat}
> My Spark session is created like so:
> {code:java}
> val spark = SparkSession.builder()
>   .appName("FooBar")
>   .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
>   .config("spark.kryoserializer.buffer.max", "2047m")
>   .config("spark.kryo.registrator", "com.foo.bar.MyKryoRegistrator")
>   .config("spark.kryo.registrationRequired", "true")
>   .config("spark.network.timeout", "3600s")
>   .config("spark.driver.maxResultSize", "0")
>   .config("spark.rdd.compress", "true")
>   .config("spark.shuffle.spill", "true")
>   .getOrCreate()
> {code}
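> {{MyKryoRegistrator}} itself has the usual shape of a Spark 
> {{KryoRegistrator}}; a minimal sketch, with illustrative registered classes:
> {code:java}
> package com.foo.bar
>
> import com.esotericsoftware.kryo.Kryo
> import org.apache.spark.serializer.KryoRegistrator
>
> class MyKryoRegistrator extends KryoRegistrator {
>   // With spark.kryo.registrationRequired=true, every serialized class
>   // must be registered here (or via spark.kryo.classesToRegister).
>   override def registerClasses(kryo: Kryo): Unit = {
>     kryo.register(classOf[Array[Double]])
>     kryo.register(classOf[Array[String]])
>   }
> }
> {code}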
> Here are the config options I'm passing to spark-submit:
> {noformat}
> --conf "spark.executor.heartbeatInterval=400s"
> --conf "spark.speculation=true"
> --conf "spark.speculation.multiplier=30"
> --conf "spark.speculation.quantile=0.95"
> --conf "spark.memory.useLegacyMode=true"
> --conf "spark.shuffle.memoryFraction=0.8"
> --conf "spark.storage.memoryFraction=0.2"
> --driver-java-options "-XX:+UseG1GC"
> {noformat}
> I was able to find a workaround: copy your application JAR to each of the 
> machines in your cluster, and pass the JAR's path to {{spark-submit}} with 
> the options below. (Presumably this works because the JAR then sits on the 
> JVM's system classpath, where even the {{AppClassLoader}} can see the 
> registrator.)
> {noformat}
> --conf "spark.driver.extraClassPath=/path/to/sparklogisticregre‌​ssion.jar"
> --conf "spark.executor.extraClassPath=/path/to/sparklogisticreg‌​ression.jar"
> {noformat}
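> Putting it together, the invocation looks roughly like this (the main class 
> and paths are illustrative):
> {noformat}
> spark-submit \
>   --class com.foo.bar.Main \
>   --conf "spark.driver.extraClassPath=/path/to/sparklogisticregression.jar" \
>   --conf "spark.executor.extraClassPath=/path/to/sparklogisticregression.jar" \
>   /path/to/sparklogisticregression.jar
> {noformat}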



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org
