Hello,

This error looks strange: judging by the code, the configuration should never
be null. Could you try changing your code as follows, so that the
configuration is built by a function instead of being captured as a
pre-built value?

import java.util

import org.apache.ignite.configuration.IgniteConfiguration
import org.apache.ignite.spark.IgniteContext
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder

// Pass a function so the configuration is built on each node when the
// closure runs, instead of capturing a pre-built (possibly null) value.
val igniteContext = new IgniteContext(spark.sparkContext, () ⇒ configuration(), standalone = false)

def configuration(): IgniteConfiguration = {
  val config = new IgniteConfiguration()
  val tcpDiscoverySpi = new TcpDiscoverySpi()
  val ipFinder = new TcpDiscoveryVmIpFinder()
  ipFinder.setAddresses(
    util.Arrays.asList(
      "server1-ip",
      "server2-ip",
      "server3-ip",
      "server4-ip",
      "server5-ip:47500"))

  tcpDiscoverySpi.setIpFinder(ipFinder)
  config.setDiscoverySpi(tcpDiscoverySpi)

  config
}
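
As to why the captured value could end up null: the stack trace below goes
through scala.App and delayedInit (see the BulkLoadFeatures$delayedInit$body
frame), which suggests BulkLoadFeatures extends scala.App. Fields of such an
object are only assigned when its main() runs, and main() never runs on a
YARN executor, so a closure deserialized there reads the config field as
null. A minimal, self-contained sketch of that gotcha (the object names are
made up for illustration, not taken from your code):

object Holder extends App {
  // Statements in an App body are deferred via delayedInit: they run
  // only when Holder.main() is invoked, not when the object is loaded.
  val config = "initialized"
}

object Demo {
  def main(args: Array[String]): Unit = {
    // Holder.main() is never called, so delayedInit never ran and the
    // field backing Holder.config is still null.
    println(Holder.config) // prints "null"
  }
}

This also matches the job working in local mode, where the closure runs in
the driver JVM after main() has initialized the fields. Building the
configuration inside a def, as above, avoids the problem because the
function body executes fresh on whichever node invokes it.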


On Fri, Sep 22, 2017 at 1:09 AM, pradeepchanumolu <[email protected]>
wrote:

> I am hitting the following exception when running Ignite with Spark on
> YARN. Here is a snippet of the code. The same job runs fine in Spark
> local mode (spark-master: local); it only fails when running on YARN.
>
> val config = new IgniteConfiguration()
> val tcpDiscoverySpi = new TcpDiscoverySpi()
> val ipFinder = new TcpDiscoveryVmIpFinder()
> ipFinder.setAddresses(
>       util.Arrays.asList(
>         "server1-ip",
>         "server2-ip",
>         "server3-ip",
>         "server4-ip",
>         "server5-ip:47500"
>       ))
> tcpDiscoverySpi.setIpFinder(ipFinder)
> config.setDiscoverySpi(tcpDiscoverySpi)
>
>
> val igniteContext = new IgniteContext(spark.sparkContext, () ⇒ config,
> standalone = false)
>
> Exception:
>
>
> Driver stacktrace:
>         at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
>         at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
>         at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
>         at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
>         at scala.Option.foreach(Option.scala:257)
>         at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
>         at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
>         at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
>         at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
>         at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
>         at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
>         at org.apache.spark.SparkContext.runJob(SparkContext.scala:1918)
>         at org.apache.spark.SparkContext.runJob(SparkContext.scala:1931)
>         at org.apache.spark.SparkContext.runJob(SparkContext.scala:1944)
>         at org.apache.spark.SparkContext.runJob(SparkContext.scala:1958)
>         at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:925)
>         at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:923)
>         at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>         at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
>         at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
>         at org.apache.spark.rdd.RDD.foreachPartition(RDD.scala:923)
>         at org.apache.ignite.spark.IgniteContext.<init>(IgniteContext.scala:54)
>         at BulkLoadFeatures$.delayedEndpoint$BulkLoadFeatures$1(BulkLoadFeatures.scala:37)
>         at BulkLoadFeatures$delayedInit$body.apply(BulkLoadFeatures.scala:18)
>         at scala.Function0$class.apply$mcV$sp(Function0.scala:34)
>         at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
>         at scala.App$$anonfun$main$1.apply(App.scala:76)
>         at scala.App$$anonfun$main$1.apply(App.scala:76)
>         at scala.collection.immutable.List.foreach(List.scala:381)
>         at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
>         at scala.App$class.main(App.scala:76)
>         at BulkLoadFeatures$.main(BulkLoadFeatures.scala:18)
>         at BulkLoadFeatures.main(BulkLoadFeatures.scala)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:637)
> Caused by: java.lang.NullPointerException: Ouch! Argument cannot be null: cfg
>         at org.apache.ignite.internal.util.GridArgumentCheck.notNull(GridArgumentCheck.java:48)
>         at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:594)
>         at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:536)
>         at org.apache.ignite.Ignition.getOrStart(Ignition.java:414)
>         at org.apache.ignite.spark.IgniteContext.ignite(IgniteContext.scala:143)
>         at org.apache.ignite.spark.IgniteContext$$anonfun$1.apply(IgniteContext.scala:54)
>         at org.apache.ignite.spark.IgniteContext$$anonfun$1.apply(IgniteContext.scala:54)
>         at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$29.apply(RDD.scala:925)
>         at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$29.apply(RDD.scala:925)
>         at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1944)
>         at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1944)
>         at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
>         at org.apache.spark.scheduler.Task.run(Task.scala:99)
>         at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
>
> Can someone please look into this error?
>
> --
> Sent from: http://apache-ignite-users.70518.x6.nabble.com/
>
