I am seeing this problem even on Ignite 2.8.1 with Spark 2.4.4.
Do the Spark worker and the Ignite node need to run on the same server?


// Path to the Ignite node configuration XML passed to igniteConfig() below.
// NOTE(review): a relative path is resolved against the driver's working
// directory — confirm it is also reachable from the executors.
val CONFIG = "resources/node-config-spark.xml"
// Name of the Ignite SQL table queried via the Spark catalog and SQL below.
val TABLE_NAME = "table_access_master"

//Creating Ignite-specific implementation of Spark session.

// Shared Ignite classpath for driver and executors. Every directory entry
// must end in "/*" so the JVM wildcard expansion picks up the jars inside;
// a bare directory ("/dir/") only exposes loose .class files and resources.
// BUG FIX: the original "/opt/ignite/libs/ignite-spring/" entry was missing
// the wildcard, so the ignite-spring jars were never on the classpath — a
// common cause of spring-related ClassNotFoundException at session startup.
val igniteClasspath =
  "/opt/ignite/libs/*" +
    ":/opt/ignite/libs/optional/ignite-spark/*" +
    ":/opt/ignite/libs/optional/ignite-log4j/*" +
    ":/opt/ignite/libs/optional/ignite-yarn/*" +
    ":/opt/ignite/libs/ignite-spring/*"

// Ignite-specific implementation of the Spark session: same SparkSession
// API, but SQL is pushed down to Ignite where possible.
val igniteSession = IgniteSparkSession.builder()
  .appName("Spark test")
  .master("spark://192.168.1.25:7077")
  .config("spark.executor.instances", "1")
  .config("spark.cores.max", 2)
  .config("spark.submit.deployMode", "client")
  .config("spark.executor.memory", "1g")
  .config("spark.driver.cores", 2)
  .config("spark.executor.cores", 2)
  .config("spark.driver.memory", "2g")
  // The driver additionally needs the Spark distribution jars.
  .config("spark.driver.extraClassPath",
    igniteClasspath + ":/root/spark-2.4.4-bin-hadoop2.7/jars/*")
  .config("spark.executor.extraClassPath", igniteClasspath)
  .igniteConfig(CONFIG)
  .getOrCreate()

//Showing existing tables.
// Show the tables and columns that Ignite exposes through the Spark catalog.
igniteSession.catalog.listTables().show()
igniteSession.catalog.listColumns(TABLE_NAME).show()

// Query the Ignite table through Spark SQL. `val` instead of `var`: the
// DataFrame reference is never reassigned. Interpolate TABLE_NAME so the
// query stays consistent with the catalog lookups above.
val df = igniteSession.sql(s"select * from $TABLE_NAME limit 3")
df.show(2)

// Close the session, then stop every Ignite node started in this JVM
// (true = cancel currently running tasks rather than waiting for them).
igniteSession.close()

Ignition.stopAll(true)


Reply via email to