Hi,

I'm trying to run a query with spark-sql, but it keeps failing with the error below on the CLI (we are running spark-sql on a YARN cluster):

org.apache.spark.SparkException: Job cancelled because SparkContext was shut down
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:700)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:699)
  at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
  at org.apache.spark.scheduler.DAGScheduler.cleanUpAfterSchedulerStop(DAGScheduler.scala:699)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessActor.postStop(DAGScheduler.scala:1405)
  at akka.actor.Actor$class.aroundPostStop(Actor.scala:475)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessActor.aroundPostStop(DAGScheduler.scala:1352)
  at akka.actor.dungeon.FaultHandling$class.akka$actor$dungeon$FaultHandling$$finishTerminate(FaultHandling.scala:210)
  at akka.actor.dungeon.FaultHandling$class.terminate(FaultHandling.scala:172)
  at akka.actor.ActorCell.terminate(ActorCell.scala:369)
  at akka.actor.ActorCell.invokeAll$1(ActorCell.scala:462)
  at akka.actor.ActorCell.systemInvoke(ActorCell.scala:478)
  at akka.dispatch.Mailbox.processAllSystemMessages(Mailbox.scala:263)
  at akka.dispatch.Mailbox.run(Mailbox.scala:219)
  at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393)
  at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
  at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
  at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
  at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
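
For context, the invocation is roughly the following (the query, deploy mode, and memory settings here are placeholders, not our exact values):

    spark-sql --master yarn-client \
              --executor-memory 4g \
              -e "SELECT ... FROM ..."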

On the UI, I see connection failures in the mapPartitions stage:

java.net.SocketTimeoutException: Read timed out
        java.net.SocketInputStream.socketRead0(Native Method)
        java.net.SocketInputStream.read(SocketInputStream.java:150)
        java.net.SocketInputStream.read(SocketInputStream.java:121)
        java.io.BufferedInputStream.fill(BufferedInputStream.java:246)
        java.io.BufferedInputStream.read1(BufferedInputStream.java:286)
        java.io.BufferedInputStream.read(BufferedInputStream.java:345)
        sun.net.www.http.HttpClient.parseHTTPHeader(HttpClient.java:703)
        sun.net.www.http.HttpClient.parseHTTP(HttpClient.java:647)
        sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1534)
        sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1439)
        org.apache.spark.util.Utils$.fetchFile(Utils.scala:362)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:331)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:329)
        scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
        scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
        scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
        scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
        org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:329)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:162)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        java.lang.Thread.run(Thread.java:745)
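
The read timeout is raised in Utils$.fetchFile while the executor refreshes dependencies from the driver, so one thing we are considering trying (our guess, not a confirmed fix) is raising the file-fetch timeout, e.g.:

    # guess: the timeout in Utils.fetchFile is governed by
    # spark.files.fetchTimeout (a plain number of seconds in
    # 1.x; the exact format may differ by Spark version)
    spark-sql --master yarn-client \
              --conf spark.files.fetchTimeout=300 \
              -e "SELECT ... FROM ..."

Does that sound like the right knob, or is something else shutting the SparkContext down?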
