name := "SparkLeaning"

version := "1.0"

scalaVersion := "2.10.4"
//scalaVersion := "2.11.2"

// NOTE: javax.servlet-api must be on the *runtime* classpath (compile scope,
// not "provided"/"test"): Spark 1.4's embedded HTTP server needs
// javax.servlet.http.HttpServletResponse, and its absence causes the
// NoClassDefFoundError seen below. After editing, re-import the sbt project
// in the IDE so the run configuration picks up the new classpath.
libraryDependencies ++= Seq(
  "io.spray" % "spray-testkit" % "1.3.1" % "test",
  "io.spray" %% "spray-json" % "1.2.6",
  "com.typesafe.akka" %% "akka-actor" % "2.3.2",
  "com.typesafe.akka" %% "akka-testkit" % "2.3.2" % "test",
  // scalatest is only used by tests; scope it accordingly.
  "org.scalatest" %% "scalatest" % "2.2.0" % "test",
  "org.apache.spark" %% "spark-core" % "1.4.0",
  "org.apache.spark" %% "spark-sql" % "1.4.0",
  "org.apache.spark" %% "spark-hive" % "1.4.0",
  "org.apache.spark" %% "spark-mllib" % "1.4.0",
  "javax.servlet" % "javax.servlet-api" % "3.0.1"
)

import org.apache.spark.{SparkConf, SparkContext}
import scala.math.random

/** Monte Carlo estimation of Pi, run on a local Spark master.
  *
  * Usage: SparkPI [slices] — `slices` (default 2) is the number of
  * partitions; 100000 sample points are drawn per slice.
  */
object SparkPI {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Spark Pi").setMaster("local")
    val spark = new SparkContext(conf)
    // Stop the context even if the job fails, so the local cluster
    // resources (HTTP server, block manager, ...) are released.
    try {
      val slices = if (args.length > 0) args(0).toInt else 2
      val n = 100000 * slices
      // Each sample is a point in the [-1, 1] x [-1, 1] square; count the
      // fraction landing inside the unit circle.
      val count = spark.parallelize(1 to n, slices).map { _ =>
        val x = random * 2 - 1
        val y = random * 2 - 1
        if (x * x + y * y < 1) 1 else 0
      }.reduce(_ + _)
      // Original printed "Pi is roughly3.14..." — add the missing space.
      println("Pi is roughly " + 4.0 * count / n)
    } finally {
      spark.stop()
    }
  }
}

When I run this program, I get an error — can anyone help? The output is:

15/06/15 21:40:08 INFO HttpServer: Starting HTTP Server
Exception in thread "main" java.lang.NoClassDefFoundError: 
javax/servlet/http/HttpServletResponse
        at 
org.apache.spark.HttpServer.org$apache$spark$HttpServer$$doStart(HttpServer.scala:75)
        at org.apache.spark.HttpServer$$anonfun$1.apply(HttpServer.scala:62)
        at org.apache.spark.HttpServer$$anonfun$1.apply(HttpServer.scala:62)
        at 
org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1991)
        at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
        at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1982)
        at org.apache.spark.HttpServer.start(HttpServer.scala:62)
        at org.apache.spark.HttpFileServer.initialize(HttpFileServer.scala:46)
        at org.apache.spark.SparkEnv$.create(SparkEnv.scala:350)
        at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188)
        at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:424)
        at org.learn.SparkPI$.main(SparkPI.scala:24)
        at org.learn.SparkPI.main(SparkPI.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at com.intellij.rt.execution.application.AppMain.main(AppMain.java:134)
Caused by: java.lang.ClassNotFoundException: 
javax.servlet.http.HttpServletResponse
        at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
        ... 19 more
15/06/15 21:40:08 INFO DiskBlockManager: Shutdown hook called
                                          

Reply via email to