Hi

I'm trying to use the SparkContext.addJar method to add new jar files at runtime, similar to the way Tomcat loads classes dynamically. But in some cases it does not work well.

The error message says an Executor cannot load an anonymous function class.
Why can't anonymous functions be loaded, even though the jar has been added for all Executors?


Here is my code:
---
package packageA

import java.lang.reflect.Method
import java.net.{URL, URLClassLoader}

import org.apache.spark.sql.SparkSession

object AddJar {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .appName("AddJar")
      .getOrCreate()

    // jarPath, parentClassloader, className, methodName, T and params
    // are defined elsewhere in my application; omitted here for brevity.
    spark.sparkContext.addJar(jarPath)

    // Load the class from the added jar with a dedicated URLClassLoader
    // and invoke the target method reflectively.
    val urls: Array[URL] = Array(new URL(jarPath))
    val cl = new URLClassLoader(urls, parentClassloader)
    val loaded: Class[_] = cl.loadClass(className)
    val instance: Any = loaded.newInstance()
    val method: Method = loaded.getDeclaredMethod(methodName, classOf[T])
    method.invoke(instance, params)
  }
}

package packageA

import org.apache.spark.sql.SparkSession

class AnoFun {
  def main(): Unit = {
    val spark = SparkSession
      .builder
      .getOrCreate()
    import spark.implicits._ // needed for toDS()

    val ds = Seq(Seq(1, 2, 3, 4, 5), Seq(2, 3, 4, 5, 6)).toDS()
    ds.foreach { x =>
      x.fold(0)(_ + _)
    }
  }
}
---
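
If I read the ClassNotFoundException below correctly, the missing class packageA.AnoFun$$anonfun$main$1$$anonfun$apply$1 should be the inner anonymous function, i.e. the lambda passed to fold inside the one passed to foreach. This is only my assumption about the class names scalac 2.11 generates:

---
// My assumption about how the compiler names these nested closures:
ds.foreach { x =>      // becomes AnoFun$$anonfun$main$1
  x.fold(0)(_ + _)     // becomes AnoFun$$anonfun$main$1$$anonfun$apply$1
}
---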

and here are the error messages:

------
Exception in thread "Thread-30" java.lang.reflect.InvocationTargetException
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at streamupdate.ExecClass.run(Streaming.scala:142)
        at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.ClassNotFoundException: packageA.AnoFun$$anonfun$main$1$$anonfun$apply$1
        at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:348)
        at org.apache.spark.util.InnerClosureFinder$$anon$4.visitMethodInsn(ClosureCleaner.scala:552)
        at org.apache.xbean.asm6.ClassReader.readCode(ClassReader.java:2175)
        at org.apache.xbean.asm6.ClassReader.readMethod(ClassReader.java:1238)
        at org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:631)
        at org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:355)
        at org.apache.spark.util.ClosureCleaner$.getInnerClosureClasses(ClosureCleaner.scala:90)
        at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:269)
        at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:162)
        at org.apache.spark.SparkContext.clean(SparkContext.scala:2326)
        at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:926)
        at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:925)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
        at org.apache.spark.rdd.RDD.foreach(RDD.scala:925)
        at org.apache.spark.sql.Dataset$$anonfun$foreach$1.apply$mcV$sp(Dataset.scala:2716)
        at org.apache.spark.sql.Dataset$$anonfun$foreach$1.apply(Dataset.scala:2716)
        at org.apache.spark.sql.Dataset$$anonfun$foreach$1.apply(Dataset.scala:2716)
        at org.apache.spark.sql.Dataset$$anonfun$withNewRDDExecutionId$1.apply(Dataset.scala:3349)
        at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
        at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
        at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
        at org.apache.spark.sql.Dataset.withNewRDDExecutionId(Dataset.scala:3345)
        at org.apache.spark.sql.Dataset.foreach(Dataset.scala:2715)
        at packageA.AnoFun.main(AnoFun.scala:66)
        ... 6 more
------
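
One thing I notice: the ClassNotFoundException seems to be thrown on the driver, inside ClosureCleaner (Class.forName via InnerClosureFinder), not on an Executor, so the closure class is apparently resolved by a classloader that does not know about my URLClassLoader. Would setting the thread context classloader before invoking the method help? A rough sketch of what I mean (untested, just an idea):

---
// Untested idea: expose my URLClassLoader as the context classloader
// while the reflective call runs, and restore the previous one afterwards.
val previous = Thread.currentThread().getContextClassLoader
Thread.currentThread().setContextClassLoader(cl)
try {
  method.invoke(instance, params)
} finally {
  Thread.currentThread().setContextClassLoader(previous)
}
---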

--
Regards,


