[ 
https://issues.apache.org/jira/browse/SPARK-8494?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

PJ Fanning updated SPARK-8494:
------------------------------
    Description: 
I found a similar issue to SPARK-1923 but with Scala 2.10.4.
I used the Test.scala from SPARK-1923 but used the libraryDependencies from a 
build.sbt that I am working on.
If I remove the spray 1.3.3 jars, the test case passes; otherwise it fails with a 
ClassNotFoundException.

Application:
{code}
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object Test {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[4]").setAppName("Test")
    val sc = new SparkContext(conf)
    sc.makeRDD(1 to 1000, 10).map(x => Some(x)).count
    sc.stop()
  }
}
{code}

Exception:
{code}
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:1 
failed 1 times, most recent failure: Exception failure in TID 1 on host 
localhost: java.lang.ClassNotFoundException: scala.collection.immutable.Range
        java.net.URLClassLoader$1.run(URLClassLoader.java:366)
        java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        java.security.AccessController.doPrivileged(Native Method)
        java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        java.lang.ClassLoader.loadClass(ClassLoader.java:425)
        java.lang.ClassLoader.loadClass(ClassLoader.java:358)
        java.lang.Class.forName0(Native Method)
        java.lang.Class.forName(Class.java:270)
        
org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:60)
        java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1612)
        java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517)
{code}

{code}
name := "spark-test-case"

version := "1.0"

scalaVersion := "2.10.4"

resolvers += "spray repo" at "http://repo.spray.io"

resolvers += "Scalaz Bintray Repo" at "https://dl.bintray.com/scalaz/releases"

val akkaVersion = "2.3.11"
val sprayVersion = "1.3.3"

libraryDependencies ++= Seq(
  "com.h2database"      % "h2"               % "1.4.187",
  "com.typesafe.akka"  %% "akka-actor"       % akkaVersion,
  "com.typesafe.akka"  %% "akka-slf4j"       % akkaVersion,
  "ch.qos.logback"      % "logback-classic"  % "1.0.13",
  "io.spray"           %% "spray-can"        % sprayVersion,
  "io.spray"           %% "spray-routing"    % sprayVersion,
  "io.spray"           %% "spray-json"       % "1.3.1",
  "com.databricks"     %% "spark-csv"        % "1.0.3",
  "org.specs2"         %% "specs2"           % "2.4.17"       % "test",
  "org.specs2"         %% "specs2-junit"     % "2.4.17"       % "test",
  "io.spray"           %% "spray-testkit"    % sprayVersion   % "test",
  "com.typesafe.akka"  %% "akka-testkit"     % akkaVersion    % "test",
  "junit"               % "junit"            % "4.12"         % "test"
)

scalacOptions ++= Seq(
  "-unchecked",
  "-deprecation",
  "-Xlint",
  "-Ywarn-dead-code",
  "-language:_",
  "-target:jvm-1.7",
  "-encoding", "UTF-8"
)

testOptions += Tests.Argument(TestFrameworks.JUnit, "-v")
{code}


  was:
I found a similar issue to SPARK-1923 but with Scala 2.10.4.
I used the Test.scala from SPARK-1923 but used the libraryDependencies from a 
build.sbt that I am working on.
If I remove the spray 1.3.3 jars, the test case passes but has a SPARK-1923

Application:
{code}
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object Test {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[4]").setAppName("Test")
    val sc = new SparkContext(conf)
    sc.makeRDD(1 to 1000, 10).map(x => Some(x)).count
    sc.stop()
  }
{code}

Exception:
{code}
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:1 
failed 1 times, most recent failure: Exception failure in TID 1 on host 
localhost: java.lang.ClassNotFoundException: scala.None$
        java.net.URLClassLoader$1.run(URLClassLoader.java:366)
        java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        java.security.AccessController.doPrivileged(Native Method)
        java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        java.lang.ClassLoader.loadClass(ClassLoader.java:425)
        java.lang.ClassLoader.loadClass(ClassLoader.java:358)
        java.lang.Class.forName0(Native Method)
        java.lang.Class.forName(Class.java:270)
        
org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:60)
        java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1612)
        java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517)
{code}



> ClassNotFoundException when running with sbt, scala 2.10.4, spray 1.3.3
> -----------------------------------------------------------------------
>
>                 Key: SPARK-8494
>                 URL: https://issues.apache.org/jira/browse/SPARK-8494
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>            Reporter: PJ Fanning
>            Assignee: Patrick Wendell
>
> I found a similar issue to SPARK-1923 but with Scala 2.10.4.
> I used the Test.scala from SPARK-1923 but used the libraryDependencies from a 
> build.sbt that I am working on.
> If I remove the spray 1.3.3 jars, the test case passes; otherwise it fails with a 
> ClassNotFoundException.
> Application:
> {code}
> import org.apache.spark.SparkConf
> import org.apache.spark.SparkContext
> object Test {
>   def main(args: Array[String]): Unit = {
>     val conf = new SparkConf().setMaster("local[4]").setAppName("Test")
>     val sc = new SparkContext(conf)
>     sc.makeRDD(1 to 1000, 10).map(x => Some(x)).count
>     sc.stop()
>   }
> }
> {code}
> Exception:
> {code}
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:1 
> failed 1 times, most recent failure: Exception failure in TID 1 on host 
> localhost: java.lang.ClassNotFoundException: scala.collection.immutable.Range
>         java.net.URLClassLoader$1.run(URLClassLoader.java:366)
>         java.net.URLClassLoader$1.run(URLClassLoader.java:355)
>         java.security.AccessController.doPrivileged(Native Method)
>         java.net.URLClassLoader.findClass(URLClassLoader.java:354)
>         java.lang.ClassLoader.loadClass(ClassLoader.java:425)
>         java.lang.ClassLoader.loadClass(ClassLoader.java:358)
>         java.lang.Class.forName0(Native Method)
>         java.lang.Class.forName(Class.java:270)
>         
> org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:60)
>         
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1612)
>         java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517)
> {code}
> {code}
> name := "spark-test-case"
> version := "1.0"
> scalaVersion := "2.10.4"
> resolvers += "spray repo" at "http://repo.spray.io"
> resolvers += "Scalaz Bintray Repo" at "https://dl.bintray.com/scalaz/releases"
> val akkaVersion = "2.3.11"
> val sprayVersion = "1.3.3"
> libraryDependencies ++= Seq(
>   "com.h2database"      % "h2"               % "1.4.187",
>   "com.typesafe.akka"  %% "akka-actor"       % akkaVersion,
>   "com.typesafe.akka"  %% "akka-slf4j"       % akkaVersion,
>   "ch.qos.logback"      % "logback-classic"  % "1.0.13",
>   "io.spray"           %% "spray-can"        % sprayVersion,
>   "io.spray"           %% "spray-routing"    % sprayVersion,
>   "io.spray"           %% "spray-json"       % "1.3.1",
>   "com.databricks"     %% "spark-csv"        % "1.0.3",
>   "org.specs2"         %% "specs2"           % "2.4.17"       % "test",
>   "org.specs2"         %% "specs2-junit"     % "2.4.17"       % "test",
>   "io.spray"           %% "spray-testkit"    % sprayVersion   % "test",
>   "com.typesafe.akka"  %% "akka-testkit"     % akkaVersion    % "test",
>   "junit"               % "junit"            % "4.12"         % "test"
> )
> scalacOptions ++= Seq(
>   "-unchecked",
>   "-deprecation",
>   "-Xlint",
>   "-Ywarn-dead-code",
>   "-language:_",
>   "-target:jvm-1.7",
>   "-encoding", "UTF-8"
> )
> testOptions += Tests.Argument(TestFrameworks.JUnit, "-v")
> {code}



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to