Running the code outside the spark-shell gives the error below. The same code works fine within the spark-shell.

Here is the simplified code:
package com.intel.ihaat.knn

import org.apache.spark.SparkContext

object Knn extends Serializable {
  def main(args: Array[String]) {
    val sc = new SparkContext(args(0), "Knn",
      System.getenv("SPARK_HOME"), Seq(System.getenv("SPARK_APP_JAR")))

    val knnInput = if (args.length > 1) args(1) else "/user/root/input/test1.dat"
    val outputPath = if (args.length > 2) args(2) else "/user/root/output/knn"
    val approximateSize = if (args.length > 3) args(3).toInt else 10

    // Use more partitions for larger inputs.
    var partitions = 3
    if (approximateSize >= 10000) {
      partitions = approximateSize / 100
    }

    val data = sc.textFile(knnInput)
    // Each line is "id,feature1,feature2,...": keep the id and parse the
    // remaining fields as doubles, keyed by a random partition number.
    val data1 = data.map { x =>
      val pt = x.split(",")
      val arr = new scala.collection.mutable.ArrayBuffer[Double]()
      for (i <- 1 until pt.length) {
        arr += pt(i).toDouble
      }
      (scala.util.Random.nextInt(partitions), (pt(0), arr))
    }
    data1.saveAsTextFile(outputPath)
  }
}
----
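As far as I understand, the Knn$$anonfun$1 in the error below is the compiled class for the closure passed to map, which the executors must load from the application jar. As a debugging aid, a minimal check could go at the top of main to confirm that SPARK_APP_JAR at least points at a real jar (this is my own sketch, not part of the job above):

// Debugging sketch (assumes SPARK_APP_JAR should name the jar that
// contains Knn and its closure classes such as Knn$$anonfun$1).
val appJar = System.getenv("SPARK_APP_JAR")
require(appJar != null && new java.io.File(appJar).isFile,
  s"SPARK_APP_JAR is unset or does not point to an existing jar: $appJar")
println(s"Shipping application jar to the executors: $appJar")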
The following error is shown:
7243 [Result resolver thread-0] WARN org.apache.spark.scheduler.TaskSetManager - Loss was due to java.lang.ClassNotFoundException
java.lang.ClassNotFoundException: com.intel.ihaat.knn.Knn$$anonfun$1
        at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:247)
        at org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:37)
        at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1574)
        at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1495)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1731)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
        at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1946)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1870)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1752)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
        at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1946)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1870)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1752)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
        at java.io.ObjectInputStream.readObject(ObjectInputStream.java:350)
        at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1848)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1752)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
        at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1946)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1870)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1752)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
        at java.io.ObjectInputStream.readObject(ObjectInputStream.java:350)
        at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:40)
        at org.apache.spark.scheduler.ResultTask$.deserializeInfo(ResultTask.scala:63)
        at org.apache.spark.scheduler.ResultTask.readExternal(ResultTask.scala:139)
        at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1791)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
        at java.io.ObjectInputStream.readObject(ObjectInputStream.java:350)
        at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:40)
        at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:62)
        at org.apache.spark.executor.Executor$TaskRunner$$anonfun$run$1.apply$mcV$sp(Executor.scala:195)
        at org.apache.spark.deploy.SparkHadoopUtil.runAsUser(SparkHadoopUtil.scala:49)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:178)
        at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
        at java.lang.Thread.run(Thread.java:662)
7245 [spark-akka.actor.default-dispatcher-16] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0:0 as TID 2 on executor 2: ip-10-0-20-148.ec2.internal (NODE_LOCAL)
7246 [spark-akka.actor.default-dispatcher-16] INFO org.apache.spark.scheduler.TaskSetManager - Serialized task 0.0:0 as 11283 bytes in 0 ms
7268 [Result resolver thread-1] WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 2 (task 0.0:0)
7269 [Result resolver thread-1] INFO org.apache.spark.scheduler.TaskSetManager - Loss was due to java.lang.ClassNotFoundException: com.intel.ihaat.knn.Knn$$anonfun$1 [duplicate 1]
7270 [spark-akka.actor.default-dispatcher-16] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0:0 as TID 3 on executor 2: ip-10-0-20-148.ec2.internal (NODE_LOCAL)
7270 [spark-akka.actor.default-dispatcher-16] INFO org.apache.spark.scheduler.TaskSetManager - Serialized task 0.0:0 as 11283 bytes in 0 ms
7291 [Result resolver thread-2] WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 3 (task 0.0:0)
7291 [Result resolver thread-2] INFO org.apache.spark.scheduler.TaskSetManager - Loss was due to java.lang.ClassNotFoundException: com.intel.ihaat.knn.Knn$$anonfun$1 [duplicate 2]
7299 [spark-akka.actor.default-dispatcher-15] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0:0 as TID 4 on executor 2: ip-10-0-20-148.ec2.internal (NODE_LOCAL)
7299 [spark-akka.actor.default-dispatcher-15] INFO org.apache.spark.scheduler.TaskSetManager - Serialized task 0.0:0 as 11283 bytes in 0 ms
7324 [Result resolver thread-3] WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 4 (task 0.0:0)
7325 [Result resolver thread-3] INFO org.apache.spark.scheduler.TaskSetManager - Loss was due to java.lang.ClassNotFoundException: com.intel.ihaat.knn.Knn$$anonfun$1 [duplicate 3]
7326 [Result resolver thread-3] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0.0:0 failed 4 times; aborting job
7329 [main] INFO org.apache.spark.scheduler.DAGScheduler - Failed to run saveAsTextFile at Knn.scala:51
Exception in thread "main" org.apache.spark.SparkException: Job aborted: Task 0.0:0 failed 4 times (most recent failure: Exception failure: java.lang.ClassNotFoundException: com.intel.ihaat.knn.Knn$$anonfun$1)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$abortStage$1.apply(DAGScheduler.scala:1028)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$abortStage$1.apply(DAGScheduler.scala:1026)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$abortStage(DAGScheduler.scala:1026)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$processEvent$10.apply(DAGScheduler.scala:619)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$processEvent$10.apply(DAGScheduler.scala:619)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.scheduler.DAGScheduler.processEvent(DAGScheduler.scala:619)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$start$1$$anon$2$$anonfun$receive$1.applyOrElse(DAGScheduler.scala:207)
        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
        at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
        at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
        at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
        at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
        at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
7335 [Result resolver thread-3] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Remove TaskSet 0.0 from pool

Any ideas? I am building and running this with mvn.
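In case the packaging is at fault, here is a sketch of how one could list the closure classes actually inside the built jar; the CheckJar object and the jar path are my own illustration, not part of the job:

import java.util.jar.JarFile
import scala.collection.JavaConverters._

// Lists the compiled closure classes inside a jar. "/path/to/knn.jar" is a
// placeholder for whatever `mvn package` actually builds.
object CheckJar {
  def main(args: Array[String]) {
    val jar = new JarFile(if (args.nonEmpty) args(0) else "/path/to/knn.jar")
    jar.entries().asScala
      .map(_.getName)
      .filter(_.contains("$$anonfun"))
      .foreach(println) // expect com/intel/ihaat/knn/Knn$$anonfun$1.class
    jar.close()
  }
}

If Knn$$anonfun$1.class were missing from that listing, I would suspect the build rather than Spark.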
Any help will be appreciated.
Sunjay karan