See 
<https://builds.apache.org/job/beam_PostRelease_NightlySnapshot/109/display/redirect>

------------------------------------------
[...truncated 2.55 MB...]
        at org.apache.spark.scheduler.Task.run(Task.scala:108)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1517)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1505)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1504)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1504)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
        at scala.Option.foreach(Option.scala:257)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1732)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1687)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1676)
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
        at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:630)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2029)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2050)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2069)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2094)
        at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
        at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
        at org.apache.spark.api.java.JavaRDDLike$class.collect(JavaRDDLike.scala:361)
        at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
        at org.apache.beam.runners.spark.io.SparkUnboundedSource$ReadReportDStream.compute(SparkUnboundedSource.java:202)
        at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:342)
        at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:342)
        at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
        at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:341)
        at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:341)
        at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:416)
        at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:336)
        at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:334)
        at scala.Option.orElse(Option.scala:289)
        at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:331)
        at org.apache.spark.streaming.dstream.DStream.generateJob(DStream.scala:432)
        at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:122)
        at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:121)
        at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
        at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
        at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
        at org.apache.spark.streaming.DStreamGraph.generateJobs(DStreamGraph.scala:121)
        at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:249)
        at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:247)
        at scala.util.Try$.apply(Try.scala:192)
        at org.apache.spark.streaming.scheduler.JobGenerator.generateJobs(JobGenerator.scala:247)
        at org.apache.spark.streaming.scheduler.JobGenerator.org$apache$spark$streaming$scheduler$JobGenerator$$processEvent(JobGenerator.scala:183)
        at org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:89)
        at org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:88)
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
Caused by: java.lang.NullPointerException
        at org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSource$PubsubReader.ackBatch(PubsubUnboundedSource.java:651)
        at org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSource$PubsubCheckpoint.finalizeCheckpoint(PubsubUnboundedSource.java:313)
        at org.apache.beam.runners.spark.io.MicrobatchSource$Reader.finalizeCheckpoint(MicrobatchSource.java:261)
        at org.apache.beam.runners.spark.io.MicrobatchSource$Reader.advanceWithBackoff(MicrobatchSource.java:246)
        at org.apache.beam.runners.spark.io.MicrobatchSource$Reader.advance(MicrobatchSource.java:236)
        at org.apache.beam.runners.spark.stateful.StateSpecFunctions$1.apply(StateSpecFunctions.java:176)
        at org.apache.beam.runners.spark.stateful.StateSpecFunctions$1.apply(StateSpecFunctions.java:105)
        at org.apache.spark.streaming.StateSpec$$anonfun$1.apply(StateSpec.scala:181)
        at org.apache.spark.streaming.StateSpec$$anonfun$1.apply(StateSpec.scala:180)
        at org.apache.spark.streaming.rdd.MapWithStateRDDRecord$$anonfun$updateRecordWithData$1.apply(MapWithStateRDD.scala:57)
        at org.apache.spark.streaming.rdd.MapWithStateRDDRecord$$anonfun$updateRecordWithData$1.apply(MapWithStateRDD.scala:55)
        at scala.collection.Iterator$class.foreach(Iterator.scala:893)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
        at org.apache.spark.streaming.rdd.MapWithStateRDDRecord$.updateRecordWithData(MapWithStateRDD.scala:55)
        at org.apache.spark.streaming.rdd.MapWithStateRDD.compute(MapWithStateRDD.scala:159)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
        at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
        at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
        at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1038)
        at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1029)
        at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:969)
        at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1029)
        at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:760)
        at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
        at org.apache.spark.scheduler.Task.run(Task.scala:108)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)

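The root cause above is a NullPointerException thrown from PubsubUnboundedSource$PubsubReader.ackBatch while PubsubCheckpoint.finalizeCheckpoint was running, which suggests the checkpoint outlived the reader that owned it (the stage was being cancelled at the time). Below is a hypothetical sketch of a defensive guard for that situation; it is not Beam's actual code, and the GuardedCheckpointMark and AckClient names are illustrative only.

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    // Hypothetical sketch, not Beam's fix: a checkpoint mark that tolerates its
    // reader having been closed before the checkpoint is finalized, which is one
    // way the NullPointerException in ackBatch above could arise.
    public class GuardedCheckpointMark {
      /** Minimal stand-in for the component that acks Pub/Sub message batches. */
      interface AckClient {
        void ackBatch(List<String> ackIds) throws IOException;
      }

      private final Object lock = new Object();
      private AckClient ackClient; // nulled out when the owning reader closes

      GuardedCheckpointMark(AckClient ackClient) {
        this.ackClient = ackClient;
      }

      /** Called when the owning reader shuts down. */
      void onReaderClosed() {
        synchronized (lock) {
          ackClient = null;
        }
      }

      /** Finalizes the checkpoint, skipping the ack if the reader is already gone. */
      void finalizeCheckpoint(List<String> ackIds) throws IOException {
        synchronized (lock) {
          if (ackClient == null) {
            // Reader already closed: leave the messages unacked so Pub/Sub
            // redelivers them, instead of dereferencing a null client.
            return;
          }
          ackClient.ackBatch(ackIds);
        }
      }

      public static void main(String[] args) throws IOException {
        GuardedCheckpointMark mark =
            new GuardedCheckpointMark(ackIds -> System.out.println("acked " + ackIds));
        mark.onReaderClosed();                            // reader shuts down first
        mark.finalizeCheckpoint(Arrays.asList("ack-1"));  // no-op instead of an NPE
      }
    }

Under this pattern the unacked messages are simply redelivered by Pub/Sub rather than crashing the micro-batch.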
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Executor killed task 7.0 in stage 23.0 (TID 65), reason: stage cancelled
Mar 07, 2018 10:33:58 PM org.apache.beam.runners.spark.io.SourceDStream computeReadMaxRecords
INFO: Max records per batch has not been limited by either configuration or the rate controller, and will remain unlimited for the current batch (9223372036854775807).
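If an unlimited batch is undesirable, the Spark runner's options can cap it. A minimal sketch, assuming SparkPipelineOptions exposes setMaxRecordsPerBatch in the Beam version under test (the 1000-record limit is an arbitrary example):

    import org.apache.beam.runners.spark.SparkPipelineOptions;
    import org.apache.beam.runners.spark.SparkRunner;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;

    public class BoundedBatchOptions {
      public static void main(String[] args) {
        // Cap how many records each micro-batch may read, so an unbounded source
        // cannot fall back to the Long.MAX_VALUE (9223372036854775807) default.
        SparkPipelineOptions options =
            PipelineOptionsFactory.fromArgs(args).as(SparkPipelineOptions.class);
        options.setRunner(SparkRunner.class);
        options.setMaxRecordsPerBatch(1000L); // assumed setter; verify against your Beam version
        // ... construct and run the pipeline with these options ...
      }
    }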
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Stopped generation timer
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Lost task 7.0 in stage 23.0 (TID 65, localhost, executor driver): TaskKilled (stage cancelled)
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Waiting for jobs to be processed and checkpoints to be written
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Timed out while stopping the job generator (timeout = 5000)
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Waited for jobs to be processed and checkpoints to be written
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Starting job: DStream at SparkUnboundedSource.java:172
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: CheckpointWriter executor terminated? true, waited for 0 ms.
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Stopped JobGenerator
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Stopped JobScheduler
Mar 07, 2018 10:33:58 PM org.spark_project.jetty.server.handler.ContextHandler doStop
INFO: Stopped o.s.j.s.ServletContextHandler@4556dc5{/streaming,null,UNAVAILABLE,@Spark}
Mar 07, 2018 10:33:58 PM org.spark_project.jetty.server.handler.ContextHandler doStop
INFO: Stopped o.s.j.s.ServletContextHandler@6431ede4{/streaming/batch,null,UNAVAILABLE,@Spark}
Mar 07, 2018 10:33:58 PM org.spark_project.jetty.server.handler.ContextHandler doStop
INFO: Stopped o.s.j.s.ServletContextHandler@3ce4194e{/static/streaming,null,UNAVAILABLE,@Spark}
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: StreamingContext stopped successfully
Mar 07, 2018 10:33:58 PM org.spark_project.jetty.server.AbstractConnector doStop
INFO: Stopped Spark@4d9a0556{HTTP/1.1,[http/1.1]}{127.0.0.1:4040}
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Stopped Spark web UI at http://127.0.0.1:4040
Mar 07, 2018 10:33:58 PM org.apache.beam.runners.spark.stateful.StateSpecFunctions$1 apply
INFO: Source id 0_5 spent 1279 millis on reading.
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Block rdd_27_5 stored as values in memory (estimated size 985.8 KB, free 1794.5 MB)
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Added rdd_27_5 in memory on 127.0.0.1:36700 (size: 985.8 KB, free: 1794.6 MB)
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Putting block rdd_31_5 failed due to an exception
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Block rdd_31_5 could not be removed as it was not found on disk or in memory
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Putting block rdd_35_5 failed due to an exception
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Block rdd_35_5 could not be removed as it was not found on disk or in memory
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Executor killed task 5.0 in stage 23.0 (TID 63), reason: stage cancelled
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Lost task 5.0 in stage 23.0 (TID 63, localhost, executor driver): TaskKilled (stage cancelled)
Mar 07, 2018 10:33:58 PM org.apache.beam.runners.spark.stateful.StateSpecFunctions$1 apply
INFO: Source id 0_6 spent 1327 millis on reading.
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Block rdd_19_6 stored as values in memory (estimated size 678.8 KB, free 1793.8 MB)
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Added rdd_19_6 in memory on 127.0.0.1:36700 (size: 678.8 KB, free: 1793.9 MB)
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Putting block rdd_23_6 failed due to an exception
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Block rdd_23_6 could not be removed as it was not found on disk or in memory
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Putting block rdd_27_6 failed due to an exception
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Block rdd_27_6 could not be removed as it was not found on disk or in memory
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Putting block rdd_31_6 failed due to an exception
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Block rdd_31_6 could not be removed as it was not found on disk or in memory
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Putting block rdd_35_6 failed due to an exception
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Block rdd_35_6 could not be removed as it was not found on disk or in memory
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logInfo
INFO: Executor killed task 6.0 in stage 23.0 (TID 64), reason: stage cancelled
Mar 07, 2018 10:33:58 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Lost task 6.0 in stage 23.0 (TID 64, localhost, executor driver): TaskKilled (stage cancelled)
Mar 07, 2018 10:33:59 PM org.apache.beam.runners.spark.stateful.StateSpecFunctions$1 apply
INFO: Source id 0_3 spent 1259 millis on reading.
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Block rdd_31_3 stored as values in memory (estimated size 1062.0 KB, free 1792.8 MB)
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Added rdd_31_3 in memory on 127.0.0.1:36700 (size: 1062.0 KB, free: 1792.9 MB)
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Putting block rdd_35_3 failed due to an exception
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Block rdd_35_3 could not be removed as it was not found on disk or in memory
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Executor killed task 3.0 in stage 23.0 (TID 61), reason: stage cancelled
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logWarning
WARNING: Lost task 3.0 in stage 23.0 (TID 61, localhost, executor driver): TaskKilled (stage cancelled)
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Removed TaskSet 23.0, whose tasks have all completed, from pool 
Mar 07, 2018 10:33:59 PM org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSource createRandomSubscription
WARNING: Created subscription projects/apache-beam-testing/subscriptions/java_mobile_gaming_topic_beam_-5493834867768394301 to topic projects/apache-beam-testing/topics/java_mobile_gaming_topic. Note this subscription WILL NOT be deleted when the pipeline terminates
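One way to avoid leaking such auto-created subscriptions is to read from a pre-created subscription explicitly, in which case the source never calls createRandomSubscription. A minimal sketch using PubsubIO; the project and subscription names are placeholders:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.values.PCollection;

    public class ExplicitSubscriptionRead {
      public static void main(String[] args) {
        Pipeline pipeline =
            Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
        // Reading from an existing subscription means nothing is created on the
        // fly, so nothing is left behind when the pipeline terminates.
        PCollection<String> messages =
            pipeline.apply(
                PubsubIO.readStrings()
                    .fromSubscription("projects/my-project/subscriptions/my-subscription"));
        // ... downstream transforms ...
        pipeline.run();
      }
    }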
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Got job 8 (DStream at SparkUnboundedSource.java:172) with 16 output partitions
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Final stage: ResultStage 26 (DStream at SparkUnboundedSource.java:172)
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Parents of final stage: List(ShuffleMapStage 24, ShuffleMapStage 25)
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: MapOutputTrackerMasterEndpoint stopped!
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: MemoryStore cleared
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: BlockManager stopped
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: BlockManagerMaster stopped
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: OutputCommitCoordinator stopped!
Mar 07, 2018 10:33:59 PM org.apache.spark.internal.Logging$class logInfo
INFO: Successfully stopped SparkContext
[ERROR] Failed to execute goal org.codehaus.mojo:exec-maven-plugin:1.6.0:java (default-cli) on project word-count-beam: An exception occurred while executing the Java class. Failed to wait for the pipeline to finish: org.apache.beam.runners.spark.SparkPipelineResult$StreamingMode@6b9a692f -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException

***********************************************************
***********************************************************
*************************Tear Down*************************
The Pub/Sub topic has been deleted: projects/apache-beam-testing/topics/leaderboard-jenkins-0307223259-1726341a
The Pub/Sub subscription has been deleted: projects/apache-beam-testing/subscriptions/leaderboard-jenkins-0307223259-1726341a
***********************************************************
***********************************************************
[ERROR] Failed command
:runners:spark:runMobileGamingJavaSpark FAILED

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':runners:spark:runMobileGamingJavaSpark'.
> Process 'command '/usr/local/asfpackages/java/jdk1.8.0_152/bin/java'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

BUILD FAILED in 4m 7s
2 actionable tasks: 2 executed
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
Not sending mail to unregistered user yifan...@yifanzou-linuxworkstation.sea.corp.google.com
