I use IgniteSparkSession to execute Spark SQL but get an
exception: org.apache.ignite.IgniteIllegalStateException: Ignite instance
with provided name doesn't exist. Did you call Ignition.start(..) to start
an Ignite instance? [name=null] 

My test case runs well when I run Spark in local mode, but it throws the
exception when I run it against my local Spark cluster. I tried to find out why on the
mailing list but did not get a clear answer. 

My test environment: 
My app uses the default settings from the examples. 
OS: Windows 10 
JDK: 1.8.0_112 
Ignite version is 2.6.0; I start a node with the default settings. 
Spark version is 2.3.1; I start a standalone cluster with one master and one
worker. I have copied the required jars from Ignite to Spark. 

The full exception stack trace is: 

Exception in thread "main" org.apache.spark.SparkException: Exception thrown
in awaitResult: 
        at
org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:205) 
        at
org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:136)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:144)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:140)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:140) 
        at
org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoinExec.doExecute(BroadcastNestedLoopJoinExec.scala:343)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.doEstimationIfNecessary(ExchangeCoordinator.scala:211)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.postShuffleRDD(ExchangeCoordinator.scala:259)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:124)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:371)
 
        at
org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121) 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.doExecute(WholeStageCodegenExec.scala:363)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.joins.SortMergeJoinExec.inputRDDs(SortMergeJoinExec.scala:386)
 
        at
org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.doEstimationIfNecessary(ExchangeCoordinator.scala:211)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.postShuffleRDD(ExchangeCoordinator.scala:259)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:124)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:371)
 
        at
org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121) 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.doExecute(WholeStageCodegenExec.scala:363)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.joins.SortMergeJoinExec.inputRDDs(SortMergeJoinExec.scala:386)
 
        at
org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.doEstimationIfNecessary(ExchangeCoordinator.scala:211)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.postShuffleRDD(ExchangeCoordinator.scala:259)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:124)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:371)
 
        at
org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121) 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.doExecute(WholeStageCodegenExec.scala:363)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.joins.SortMergeJoinExec.inputRDDs(SortMergeJoinExec.scala:386)
 
        at
org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
 
        at
org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:150)
 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.doEstimationIfNecessary(ExchangeCoordinator.scala:211)
 
        at
org.apache.spark.sql.execution.exchange.ExchangeCoordinator.postShuffleRDD(ExchangeCoordinator.scala:259)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:124)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:371)
 
        at
org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:150)
 
        at
org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:150)
 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128)
 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) 
        at
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:371)
 
        at
org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:150)
 
        at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
 
        at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) 
        at
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) 
        at
org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247) 
        at
org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:294) 
        at
org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2770) 
        at
org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2769) 
        at
org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) 
        at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
 
        at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) 
        at org.apache.spark.sql.Dataset.count(Dataset.scala:2769) 
        at
spark.JavaIgniteCatalogExample.main(JavaIgniteCatalogExample.java:79) 
Caused by: org.apache.spark.SparkException: Job aborted due to stage
failure: Task 0 in stage 0.0 failed 4 times, most recent failure: Lost task
0.3 in stage 0.0 (TID 4, 10.45.105.213, executor 0): class
org.apache.ignite.IgniteIllegalStateException: Ignite instance with provided
name doesn't exist. Did you call Ignition.start(..) to start an Ignite
instance? [name=null] 
        at org.apache.ignite.internal.IgnitionEx.grid(IgnitionEx.java:1383) 
        at org.apache.ignite.internal.IgnitionEx.grid(IgnitionEx.java:1251) 
        at org.apache.ignite.Ignition.ignite(Ignition.java:493) 
        at org.apache.ignite.spark.impl.package$.ignite(package.scala:84) 
        at
org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$2.apply(IgniteRelationProvider.scala:226)
 
        at
org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$2.apply(IgniteRelationProvider.scala:223)
 
        at org.apache.ignite.spark.Once.apply(IgniteContext.scala:222) 
        at
org.apache.ignite.spark.IgniteContext.ignite(IgniteContext.scala:144) 
        at
org.apache.ignite.spark.impl.IgniteSQLDataFrameRDD.compute(IgniteSQLDataFrameRDD.scala:65)
 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) 
        at org.apache.spark.scheduler.Task.run(Task.scala:109) 
        at
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) 
        at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) 
        at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) 
        at java.lang.Thread.run(Thread.java:745) 

Driver stacktrace: 
        at
org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1599)
 
        at
org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1587)
 
        at
org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1586)
 
        at
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) 
        at
scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) 
        at
org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1586) 
        at
org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
 
        at
org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
 
        at scala.Option.foreach(Option.scala:257) 
        at
org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
 
        at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1820)
 
        at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1769)
 
        at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1758)
 
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48) 
        at
org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642) 
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2027) 
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2048) 
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2067) 
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2092) 
        at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:939) 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) 
        at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112) 
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:363) 
        at org.apache.spark.rdd.RDD.collect(RDD.scala:938) 
        at
org.apache.spark.sql.execution.SparkPlan.executeCollectIterator(SparkPlan.scala:304)
 
        at
org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:76)
 
        at
org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:73)
 
        at
org.apache.spark.sql.execution.SQLExecution$.withExecutionId(SQLExecution.scala:97)
 
        at
org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:72)
 
        at
org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:72)
 
        at
scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
 
        at
scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24) 
        at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source) 
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown
Source) 
        at java.lang.Thread.run(Unknown Source) 
Caused by: class org.apache.ignite.IgniteIllegalStateException: Ignite
instance with provided name doesn't exist. Did you call Ignition.start(..)
to start an Ignite instance? [name=null] 
        at org.apache.ignite.internal.IgnitionEx.grid(IgnitionEx.java:1383) 
        at org.apache.ignite.internal.IgnitionEx.grid(IgnitionEx.java:1251) 
        at org.apache.ignite.Ignition.ignite(Ignition.java:493) 
        at org.apache.ignite.spark.impl.package$.ignite(package.scala:84) 
        at
org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$2.apply(IgniteRelationProvider.scala:226)
 
        at
org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$2.apply(IgniteRelationProvider.scala:223)
 
        at org.apache.ignite.spark.Once.apply(IgniteContext.scala:222) 
        at
org.apache.ignite.spark.IgniteContext.ignite(IgniteContext.scala:144) 
        at
org.apache.ignite.spark.impl.IgniteSQLDataFrameRDD.compute(IgniteSQLDataFrameRDD.scala:65)
 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) 
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) 
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) 
        at
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) 
        at org.apache.spark.scheduler.Task.run(Task.scala:109) 
        at
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) 
        at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) 
        at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) 
        at java.lang.Thread.run(Thread.java:745)



--
Sent from: http://apache-ignite-users.70518.x6.nabble.com/

Reply via email to