Repository: spark
Updated Branches:
  refs/heads/master 1390e56fa -> e9a4fe12d


[BUILD] Add the ability to launch spark-shell from SBT.

Now you can quickly launch the spark-shell without building an assembly.  For 
quick development iteration run `build/sbt ~sparkShell` and calling exit will 
relaunch with any changes.

Author: Michael Armbrust <mich...@databricks.com>

Closes #4438 from marmbrus/sparkShellSbt and squashes the following commits:

b4e44fe [Michael Armbrust] [BUILD] Add the ability to launch spark-shell from 
SBT.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e9a4fe12
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e9a4fe12
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e9a4fe12

Branch: refs/heads/master
Commit: e9a4fe12d3dc4afeb715c7649df2c5527a2d5d35
Parents: 1390e56
Author: Michael Armbrust <mich...@databricks.com>
Authored: Sat Feb 7 00:14:38 2015 -0800
Committer: Reynold Xin <r...@databricks.com>
Committed: Sat Feb 7 00:14:38 2015 -0800

----------------------------------------------------------------------
 project/SparkBuild.scala | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e9a4fe12/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 5e3051c..95f8dfa 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -177,6 +177,29 @@ object SparkBuild extends PomBuild {
 
   enable(Flume.settings)(streamingFlumeSink)
 
+
+  /**
+   * Adds the ability to run the spark shell directly from SBT without building an assembly
+   * jar.
+   *
+   * Usage: `build/sbt sparkShell`
+   */
+  val sparkShell = taskKey[Unit]("start a spark-shell.")
+
+  enable(Seq(
+    connectInput in run := true,
+    fork := true,
+    outputStrategy in run := Some (StdoutOutput),
+
+    javaOptions ++= Seq("-Xmx2G", "-XX:MaxPermSize=1g"),
+
+    sparkShell := {
+      (runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
+    }
+  ))(assembly)
+
+  enable(Seq(sparkShell := sparkShell in "assembly"))(spark)
+
   // TODO: move this to its upstream project.
   override def projectDefinitions(baseDirectory: File): Seq[Project] = {
     super.projectDefinitions(baseDirectory).map { x =>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to