[ https://issues.apache.org/jira/browse/SPARK-3937?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14486763#comment-14486763 ]
Guoqiang Li commented on SPARK-3937:
------------------------------------

I have encountered this issue in Spark 1.3. My spark-defaults.conf is:

{code:none}
spark.akka.frameSize 20
spark.akka.askTimeout 120
spark.akka.timeout 120
spark.default.parallelism 72
spark.locality.wait 10000
spark.storage.blockManagerTimeoutIntervalMs 6000000
#spark.yarn.max.executor.failures 100
spark.core.connection.ack.wait.timeout 360
spark.storage.memoryFraction 0.2
spark.broadcast.factory org.apache.spark.broadcast.TorrentBroadcastFactory
#spark.broadcast.blockSize 8192
spark.driver.maxResultSize 4000
#spark.shuffle.blockTransferService nio
#spark.akka.heartbeat.interval 100
spark.kryoserializer.buffer.max.mb 256
spark.serializer org.apache.spark.serializer.KryoSerializer
spark.kryo.registrator org.apache.spark.graphx.GraphKryoRegistrator
#spark.kryo.registrator org.apache.spark.mllib.clustering.LDAKryoRegistrator
{code}

The failing task's stack trace, with Snappy faulting beneath the Kryo deserialization stream while reading shuffle data:

{code:none}
java.lang.InternalError: a fault occurred in a recent unsafe memory access operation in compiled Java code
	at org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
	at org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:594)
	at org.xerial.snappy.SnappyInputStream.hasNextChunk(SnappyInputStream.java:358)
	at org.xerial.snappy.SnappyInputStream.rawRead(SnappyInputStream.java:167)
	at org.xerial.snappy.SnappyInputStream.read(SnappyInputStream.java:150)
	at com.esotericsoftware.kryo.io.Input.fill(Input.java:140)
	at com.esotericsoftware.kryo.io.Input.require(Input.java:169)
	at com.esotericsoftware.kryo.io.Input.readInt(Input.java:337)
	at com.esotericsoftware.kryo.util.DefaultClassResolver.readClass(DefaultClassResolver.java:109)
	at com.esotericsoftware.kryo.Kryo.readClass(Kryo.java:610)
	at com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:721)
	at org.apache.spark.serializer.KryoDeserializationStream.readObject(KryoSerializer.scala:138)
	at org.apache.spark.serializer.DeserializationStream$$anon$1.getNext(Serializer.scala:133)
	at org.apache.spark.util.NextIterator.hasNext(NextIterator.scala:71)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:350)
	at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:39)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
	at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:202)
	at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:56)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:68)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
	at org.apache.spark.scheduler.Task.run(Task.scala:64)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:744)
{code}
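For anyone trying to isolate the codec outside of spark-defaults.conf, here is a minimal sketch (not from this thread; the master, app name, and the LZF codec choice are illustrative) that mirrors the serializer settings above and swaps the block compression codec away from Snappy:

{code:scala}
import org.apache.spark.{SparkConf, SparkContext}

// Sketch only: mirrors the Kryo settings from the conf above, but
// switches shuffle/block compression from Snappy to LZF so the
// snappy-java native path is taken out of the picture entirely.
val conf = new SparkConf()
  .setMaster("local[*]")                       // placeholder master
  .setAppName("SPARK-3937-codec-check")        // placeholder app name
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  .set("spark.kryo.registrator", "org.apache.spark.graphx.GraphKryoRegistrator")
  .set("spark.io.compression.codec", "lzf")    // bypass Snappy
val sc = new SparkContext(conf)

// Any shuffle exercises the codec on both the write and read path:
sc.parallelize(1 to 1000000).map(i => (i % 72, i)).groupByKey().count()
{code}

If the job runs cleanly with {{lzf}} but faults with {{snappy}}, that points at the snappy-java native binding rather than Kryo or the shuffle machinery.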
> Unsafe memory access inside of Snappy library
> ---------------------------------------------
>
>                 Key: SPARK-3937
>                 URL: https://issues.apache.org/jira/browse/SPARK-3937
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 1.2.0
>            Reporter: Patrick Wendell
>
> This was observed on master between Spark 1.1 and 1.2. Unfortunately I don't have much information about this other than the stack trace. However, it was concerning enough I figured I should post it.
>
> {code}
> java.lang.InternalError: a fault occurred in a recent unsafe memory access operation in compiled Java code
> org.xerial.snappy.SnappyNative.rawUncompress(Native Method)
> org.xerial.snappy.Snappy.rawUncompress(Snappy.java:444)
> org.xerial.snappy.Snappy.uncompress(Snappy.java:480)
> org.xerial.snappy.SnappyInputStream.hasNextChunk(SnappyInputStream.java:355)
> org.xerial.snappy.SnappyInputStream.rawRead(SnappyInputStream.java:159)
> org.xerial.snappy.SnappyInputStream.read(SnappyInputStream.java:142)
> java.io.ObjectInputStream$PeekInputStream.read(ObjectInputStream.java:2310)
> java.io.ObjectInputStream$BlockDataInputStream.read(ObjectInputStream.java:2712)
> java.io.ObjectInputStream$BlockDataInputStream.readFully(ObjectInputStream.java:2742)
> java.io.ObjectInputStream.readArray(ObjectInputStream.java:1687)
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1344)
> java.io.ObjectInputStream.readArray(ObjectInputStream.java:1706)
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1344)
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
> java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
> org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
> org.apache.spark.serializer.DeserializationStream$$anon$1.getNext(Serializer.scala:133)
> org.apache.spark.util.NextIterator.hasNext(NextIterator.scala:71)
> scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:350)
> org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:39)
> scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
> scala.collection.Iterator$$anon$14.hasNext(Iterator.scala:388)
> scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
> scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:308)
> scala.collection.Iterator$class.foreach(Iterator.scala:727)
> scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
> scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
> scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
> scala.collection.AbstractIterator.to(Iterator.scala:1157)
> scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
> scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
> scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
> scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
> org.apache.spark.sql.execution.Limit$$anonfun$4.apply(basicOperators.scala:140)
> org.apache.spark.sql.execution.Limit$$anonfun$4.apply(basicOperators.scala:140)
> org.apache.spark.SparkContext$$anonfun$runJob$3.apply(SparkContext.scala:1118)
> org.apache.spark.SparkContext$$anonfun$runJob$3.apply(SparkContext.scala:1118)
> org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
> org.apache.spark.scheduler.Task.run(Task.scala:56)
> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:182)
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> java.lang.Thread.run(Thread.java:745)
> {code}
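Both traces fault inside snappy-java's native code ({{uncompressedLength}} in the comment above, {{rawUncompress}} in the original report). As a diagnostic sketch (not from this ticket; the payload size and contents are arbitrary), a plain round trip through the same binding can verify that the native library loads and runs on a suspect node:

{code:scala}
import org.xerial.snappy.Snappy

// Sanity-check sketch for the snappy-java native binding, independent
// of Spark, Kryo, and shuffle files. Compress 1 MiB of data and
// decompress it through the same native uncompress path both traces
// fault in; a healthy binding should restore the payload exactly.
val payload = Array.tabulate[Byte](1 << 20)(i => (i % 64).toByte)
val compressed = Snappy.compress(payload)     // byte[] -> byte[]
val restored = Snappy.uncompress(compressed)  // byte[] -> byte[]
require(restored.sameElements(payload), "snappy-java round trip mismatch")
{code}

If even this in-heap round trip fails on a node, the problem sits in the native library or its deployment rather than in Spark's read path.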