You are using Scala 2.11 with libraries built for Scala 2.10. Scala does not keep binary compatibility across major versions, and the missing scala/collection/GenTraversableOnce$class in your stack trace is the classic symptom of that mismatch. You can change

"org.apache.spark" % "spark-streaming_2.10" % "1.3.1"

to

"org.apache.spark" %% "spark-streaming" % "1.3.1"

and sbt will resolve the artifact that corresponds to your Scala
version.
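
For reference, here is a minimal build.sbt sketch with the dependency fixed (the name, version, and scalaVersion values are just the ones from your snippet):

    name := "scala-test-workspace"

    version := "1.0"

    scalaVersion := "2.11.6"

    // %% tells sbt to append the project's Scala binary version to the
    // artifact name, so with scalaVersion 2.11.6 this resolves to
    // spark-streaming_2.11 1.3.1 and always matches the compiler.
    libraryDependencies += "org.apache.spark" %% "spark-streaming" % "1.3.1"

Alternatively, if you need to stay on the _2.10 artifacts, set scalaVersion to a 2.10.x release instead.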


Best Regards,
Shixiong Zhu

2015-05-06 16:21 GMT-07:00 anshu shukla <anshushuk...@gmail.com>:

> Exception when running a sample test in the IntelliJ IDE:
>
> Exception in thread "main" java.lang.NoClassDefFoundError: scala/collection/GenTraversableOnce$class
> at akka.util.Collections$EmptyImmutableSeq$.<init>(Collections.scala:15)
> at akka.util.Collections$EmptyImmutableSeq$.<clinit>(Collections.scala)
> at akka.japi.Util$.immutableSeq(JavaAPI.scala:229)
> at akka.remote.RemoteSettings.<init>(RemoteSettings.scala:30)
> at akka.remote.RemoteActorRefProvider.<init>(RemoteActorRefProvider.scala:114)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
> at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
> at scala.util.Try$.apply(Try.scala:191)
> at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
> at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
> at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
> at scala.util.Success.flatMap(Try.scala:230)
> at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
> at akka.actor.ActorSystemImpl.liftedTree1$1(ActorSystem.scala:584)
> at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:577)
> at akka.actor.ActorSystem$.apply(ActorSystem.scala:141)
> at akka.actor.ActorSystem$.apply(ActorSystem.scala:118)
> at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:122)
> at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:55)
> at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
> at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1837)
> at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:166)
> at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1828)
> at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:57)
> at org.apache.spark.SparkEnv$.create(SparkEnv.scala:223)
> at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:163)
> at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:269)
> at org.apache.spark.SparkContext.<init>(SparkContext.scala:272)
> *at Testspark$.main(Testspark.scala:17)*
> at Testspark.main(Testspark.scala)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:606)
> at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
> Caused by: java.lang.ClassNotFoundException: scala.collection.GenTraversableOnce$class
> at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
> at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
> at java.security.AccessController.doPrivileged(Native Method)
> at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> ... 38 more
>
>
>
> *The code, Testspark.scala:*
>
> /**
>  * Created by anshushukla on 07/05/15.
>  */
> import org.apache.spark.{SparkConf, SparkContext}
>
> object Testspark {
>
>   def main(args: Array[String]) {
>     val conf = new SparkConf()
>       .setMaster("local[2]")
>       .setAppName("TestSpark")
>
>     val sc = new SparkContext(conf) // line 17, where the exception is thrown
>
>     val data = sc.parallelize(1 to 1000000000).collect().filter(_ < 1000)
>     data.foreach(println)
>   }
>
> }
>
>
> *build.sbt:*
>
> name := "scala-test-workspace"
>
> version := "1.0"
>
> scalaVersion := "2.11.6"
>
> libraryDependencies += "org.apache.spark" % "spark-streaming_2.10" % "1.3.1"
>
>
> --
> Thanks & Regards,
> Anshu Shukla
> Indian Institute of Science
>
