git commit: [Hot Fix #42] Persisted RDD disappears on storage page if re-used
Repository: spark
Updated Branches:
  refs/heads/master 94fe7fd4f -> ada310a9d

[Hot Fix #42] Persisted RDD disappears on storage page if re-used

If a previously persisted RDD is re-used, its information disappears from the Storage page. This is because the tasks associated with re-using the RDD do not report the RDD's blocks as updated (which is correct). On stage submit, however, we overwrite any existing information regarding that RDD with a fresh one, whether or not the information for the RDD already exists.

Author: Andrew Or

Closes #281 from andrewor14/ui-storage-fix and squashes the following commits:

408585a [Andrew Or] Fix storage UI bug

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ada310a9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ada310a9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ada310a9

Branch: refs/heads/master
Commit: ada310a9d3d5419e101b24d9b41398f609da1ad3
Parents: 94fe7fd
Author: Andrew Or
Authored: Mon Mar 31 23:01:14 2014 -0700
Committer: Patrick Wendell
Committed: Mon Mar 31 23:01:14 2014 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/spark/blob/ada310a9/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
index 4d8b01d..a7b24ff 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
@@ -84,7 +84,7 @@ private[ui] class BlockManagerListener(storageStatusListener: StorageStatusListe

   override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted) = synchronized {
     val rddInfo = stageSubmitted.stageInfo.rddInfo
-    _rddInfoMap(rddInfo.id) = rddInfo
+    _rddInfoMap.getOrElseUpdate(rddInfo.id, rddInfo)
   }

   override def onStageCompleted(stageCompleted: SparkListenerStageCompleted) = synchronized {
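The one-line change above works because scala.collection.mutable.Map.getOrElseUpdate only inserts when the key is absent, whereas an assignment always replaces the existing value. A minimal, self-contained sketch of the difference (the RddInfo class and listener method below are simplified stand-ins, not the actual Spark UI code):

    import scala.collection.mutable

    // Hypothetical, simplified stand-in for the storage UI's bookkeeping.
    case class RddInfo(id: Int, name: String, var memSize: Long = 0L)

    object StorageBookkeepingSketch {
      val rddInfoMap = mutable.Map[Int, RddInfo]()

      def onStageSubmitted(fresh: RddInfo): Unit = {
        // Old behaviour: rddInfoMap(fresh.id) = fresh
        //   -> replaces the entry and wipes out sizes collected from earlier block updates.
        // Fixed behaviour: keep the existing entry if one is already present.
        rddInfoMap.getOrElseUpdate(fresh.id, fresh)
      }

      def main(args: Array[String]): Unit = {
        // First stage: the RDD is persisted and its blocks report 1 MB in memory.
        onStageSubmitted(RddInfo(0, "cached"))
        rddInfoMap(0).memSize = 1L << 20

        // A later stage re-uses the cached RDD; no block updates arrive,
        // but the storage information recorded earlier must survive.
        onStageSubmitted(RddInfo(0, "cached"))
        assert(rddInfoMap(0).memSize == (1L << 20))
      }
    }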
git commit: [SPARK-1377] Upgrade Jetty to 8.1.14v20131031
Repository: spark
Updated Branches:
  refs/heads/master 564f1c137 -> 94fe7fd4f

[SPARK-1377] Upgrade Jetty to 8.1.14v20131031

Previous version was 7.6.8v20121106. The only difference between Jetty 7 and Jetty 8 is that the former uses Servlet API 2.5, while the latter uses Servlet API 3.0.

Author: Andrew Or

Closes #280 from andrewor14/jetty-upgrade and squashes the following commits:

dd57104 [Andrew Or] Merge github.com:apache/spark into jetty-upgrade
e75fa85 [Andrew Or] Upgrade Jetty to 8.1.14v20131031

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/94fe7fd4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/94fe7fd4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/94fe7fd4

Branch: refs/heads/master
Commit: 94fe7fd4fa9749cb13e540e4f9caf28de47eaf32
Parents: 564f1c1
Author: Andrew Or
Authored: Mon Mar 31 21:42:36 2014 -0700
Committer: Patrick Wendell
Committed: Mon Mar 31 21:42:36 2014 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/ui/JettyUtils.scala |  3 ++-
 pom.xml                                                  |  8 ++++----
 project/SparkBuild.scala                                 | 12 ++++++------
 3 files changed, 12 insertions(+), 11 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/spark/blob/94fe7fd4/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index 6e1736f..e1a1f20 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -18,13 +18,14 @@
 package org.apache.spark.ui

 import java.net.{InetSocketAddress, URL}
+import javax.servlet.DispatcherType
 import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

 import scala.annotation.tailrec
 import scala.util.{Failure, Success, Try}
 import scala.xml.Node

-import org.eclipse.jetty.server.{DispatcherType, Server}
+import org.eclipse.jetty.server.Server
 import org.eclipse.jetty.server.handler._
 import org.eclipse.jetty.servlet._
 import org.eclipse.jetty.util.thread.QueuedThreadPool

http://git-wip-us.apache.org/repos/asf/spark/blob/94fe7fd4/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 72acf2b..09a449d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -192,22 +192,22 @@
       <dependency>
         <groupId>org.eclipse.jetty</groupId>
         <artifactId>jetty-util</artifactId>
-        <version>7.6.8.v20121106</version>
+        <version>8.1.14.v20131031</version>
       </dependency>
       <dependency>
         <groupId>org.eclipse.jetty</groupId>
         <artifactId>jetty-security</artifactId>
-        <version>7.6.8.v20121106</version>
+        <version>8.1.14.v20131031</version>
       </dependency>
       <dependency>
         <groupId>org.eclipse.jetty</groupId>
         <artifactId>jetty-plus</artifactId>
-        <version>7.6.8.v20121106</version>
+        <version>8.1.14.v20131031</version>
       </dependency>
       <dependency>
         <groupId>org.eclipse.jetty</groupId>
         <artifactId>jetty-server</artifactId>
-        <version>7.6.8.v20121106</version>
+        <version>8.1.14.v20131031</version>
       </dependency>
       <dependency>
         <groupId>com.google.guava</groupId>

http://git-wip-us.apache.org/repos/asf/spark/blob/94fe7fd4/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 2549bc9..7457ff4 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -248,13 +248,13 @@ object SparkBuild extends Build {
     */

     libraryDependencies ++= Seq(
-        "io.netty"          % "netty-all"       % "4.0.17.Final",
-        "org.eclipse.jetty" % "jetty-server"    % "7.6.8.v20121106",
-        "org.eclipse.jetty" % "jetty-util"      % "7.6.8.v20121106",
-        "org.eclipse.jetty" % "jetty-plus"      % "7.6.8.v20121106",
-        "org.eclipse.jetty" % "jetty-security"  % "7.6.8.v20121106",
+        "io.netty"          % "netty-all"       % "4.0.17.Final",
+        "org.eclipse.jetty" % "jetty-server"    % "8.1.14.v20131031",
+        "org.eclipse.jetty" % "jetty-util"      % "8.1.14.v20131031",
+        "org.eclipse.jetty" % "jetty-plus"      % "8.1.14.v20131031",
+        "org.eclipse.jetty" % "jetty-security"  % "8.1.14.v20131031",
         /** Workaround for SPARK-959. Dependency used by org.eclipse.jetty. Fixed in ivy 2.3.0. */
-        "org.eclipse.jetty.orbit" % "javax.servlet" % "2.5.0.v201103041518" artifacts Artifact("javax.servlet", "jar", "jar"),
+        "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" artifacts Artifact("javax.servlet", "jar", "jar"),
         "org.scalatest"    %% "scalatest"       % "1.9.1"  % "test",
         "org.scalacheck"   %% "scalacheck"      % "1.10.0" % "test",
         "com.novocode"      % "junit-interface" % "0.10"   % "test",
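For a standalone sbt build that wants to mirror the same servlet stack, the coordinates from the diff above would look roughly like the following build.sbt fragment (illustrative only, not part of Spark's build; the key point is that the Jetty 8 line pairs with the Servlet API 3.0 orbit artifact):

    libraryDependencies ++= Seq(
      "org.eclipse.jetty" % "jetty-server"   % "8.1.14.v20131031",
      "org.eclipse.jetty" % "jetty-util"     % "8.1.14.v20131031",
      "org.eclipse.jetty" % "jetty-plus"     % "8.1.14.v20131031",
      "org.eclipse.jetty" % "jetty-security" % "8.1.14.v20131031",
      // Servlet 3.0 API published by the Jetty Orbit project (see the SPARK-959 workaround above).
      "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" artifacts Artifact("javax.servlet", "jar", "jar")
    )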
git commit: SPARK-1376. In the yarn-cluster submitter, rename "args" option to "arg"
Repository: spark
Updated Branches:
  refs/heads/master 33b3c2a8c -> 564f1c137

SPARK-1376. In the yarn-cluster submitter, rename "args" option to "arg"

Author: Sandy Ryza

Closes #279 from sryza/sandy-spark-1376 and squashes the following commits:

d8aebfa [Sandy Ryza] SPARK-1376. In the yarn-cluster submitter, rename "args" option to "arg"

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/564f1c13
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/564f1c13
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/564f1c13

Branch: refs/heads/master
Commit: 564f1c137caf07bd1f073ec6c93551dcad935ee5
Parents: 33b3c2a
Author: Sandy Ryza
Authored: Tue Apr 1 08:26:31 2014 +0530
Committer: Mridul Muralidharan
Committed: Tue Apr 1 08:26:31 2014 +0530

----------------------------------------------------------------------
 docs/running-on-yarn.md                                 | 7 ++++---
 .../org/apache/spark/deploy/yarn/ClientArguments.scala  | 9 ++++++---
 2 files changed, 10 insertions(+), 6 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/spark/blob/564f1c13/docs/running-on-yarn.md
----------------------------------------------------------------------
diff --git a/docs/running-on-yarn.md b/docs/running-on-yarn.md
index d8657c4..9825143 100644
--- a/docs/running-on-yarn.md
+++ b/docs/running-on-yarn.md
@@ -61,7 +61,7 @@ The command to launch the Spark application on the cluster is as follows:
     SPARK_JAR= ./bin/spark-class org.apache.spark.deploy.yarn.Client \
       --jar \
       --class \
-      --args \
+      --arg \
       --num-executors \
       --driver-memory \
       --executor-memory \
@@ -72,7 +72,7 @@ The command to launch the Spark application on the cluster is as follows:
       --files \
       --archives

-For example:
+To pass multiple arguments the "arg" option can be specified multiple times. For example:

     # Build the Spark assembly JAR and the Spark examples JAR
     $ SPARK_HADOOP_VERSION=2.0.5-alpha SPARK_YARN=true sbt/sbt assembly
@@ -85,7 +85,8 @@ For example:
     ./bin/spark-class org.apache.spark.deploy.yarn.Client \
       --jar examples/target/scala-{{site.SCALA_BINARY_VERSION}}/spark-examples-assembly-{{site.SPARK_VERSION}}.jar \
       --class org.apache.spark.examples.SparkPi \
-      --args yarn-cluster \
+      --arg yarn-cluster \
+      --arg 5 \
       --num-executors 3 \
       --driver-memory 4g \
       --executor-memory 2g \

http://git-wip-us.apache.org/repos/asf/spark/blob/564f1c13/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
----------------------------------------------------------------------
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
index c565f2d..3e4c739 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
@@ -63,7 +63,10 @@ class ClientArguments(val args: Array[String], val sparkConf: SparkConf) {
           userClass = value
           args = tail

-        case ("--args") :: value :: tail =>
+        case ("--args" | "--arg") :: value :: tail =>
+          if (args(0) == "--args") {
+            println("--args is deprecated. Use --arg instead.")
+          }
           userArgsBuffer += value
           args = tail

@@ -146,8 +149,8 @@ class ClientArguments(val args: Array[String], val sparkConf: SparkConf) {
       "Options:\n" +
       "  --jar JAR_PATH             Path to your application's JAR file (required in yarn-cluster mode)\n" +
       "  --class CLASS_NAME         Name of your application's main class (required)\n" +
-      "  --args ARGS                Arguments to be passed to your application's main class.\n" +
-      "                             Mutliple invocations are possible, each will be passed in order.\n" +
+      "  --arg ARGS                 Argument to be passed to your application's main class.\n" +
+      "                             Multiple invocations are possible, each will be passed in order.\n" +
       "  --num-executors NUM        Number of executors to start (Default: 2)\n" +
       "  --executor-cores NUM       Number of cores for the executors (Default: 1).\n" +
       "  --driver-memory MEM        Memory for driver (e.g. 1000M, 2G) (Default: 512 Mb)\n" +
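A standalone sketch of the pattern used in ClientArguments (hypothetical names, not Spark's parser): accept both the old and new flag in a single case, warn when the deprecated spelling is used, and accumulate repeated occurrences in order.

    import scala.collection.mutable.ArrayBuffer

    object ArgParsingSketch {
      // Illustrative parser: repeated --arg (or deprecated --args) values are collected in order.
      def parseUserArgs(argv: List[String]): Seq[String] = {
        val userArgs = ArrayBuffer[String]()

        @annotation.tailrec
        def loop(remaining: List[String]): Unit = remaining match {
          case ("--args" | "--arg") :: value :: tail =>
            if (remaining.head == "--args") {
              Console.err.println("--args is deprecated. Use --arg instead.")
            }
            userArgs += value
            loop(tail)
          case _ :: tail => loop(tail) // options this sketch does not model are skipped
          case Nil =>
        }

        loop(argv)
        userArgs.toSeq
      }

      def main(args: Array[String]): Unit = {
        // --arg can be repeated; each value is passed to the application's main class in order.
        assert(parseUserArgs(List("--arg", "yarn-cluster", "--arg", "5")) == Seq("yarn-cluster", "5"))
      }
    }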
git commit: SPARK-1365 [HOTFIX] Fix RateLimitedOutputStream test
Repository: spark
Updated Branches:
  refs/heads/master 5731af5be -> 33b3c2a8c

SPARK-1365 [HOTFIX] Fix RateLimitedOutputStream test

This test needs to be fixed. It currently depends on Thread.sleep() having exact-timing semantics, which is not a valid assumption.

Author: Patrick Wendell

Closes #277 from pwendell/rate-limited-stream and squashes the following commits:

6c0ff81 [Patrick Wendell] SPARK-1365: Fix RateLimitedOutputStream test

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/33b3c2a8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/33b3c2a8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/33b3c2a8

Branch: refs/heads/master
Commit: 33b3c2a8c6c71b89744834017a183ea855e1697c
Parents: 5731af5
Author: Patrick Wendell
Authored: Mon Mar 31 16:25:43 2014 -0700
Committer: Patrick Wendell
Committed: Mon Mar 31 16:25:43 2014 -0700

----------------------------------------------------------------------
 .../spark/streaming/util/RateLimitedOutputStreamSuite.scala | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/spark/blob/33b3c2a8/streaming/src/test/scala/org/apache/spark/streaming/util/RateLimitedOutputStreamSuite.scala
----------------------------------------------------------------------
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/RateLimitedOutputStreamSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/RateLimitedOutputStreamSuite.scala
index 7d18a0f..9ebf7b4 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/util/RateLimitedOutputStreamSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/util/RateLimitedOutputStreamSuite.scala
@@ -36,8 +36,9 @@ class RateLimitedOutputStreamSuite extends FunSuite {
     val stream = new RateLimitedOutputStream(underlying, desiredBytesPerSec = 1)
     val elapsedNs = benchmark { stream.write(data.getBytes("UTF-8")) }
-    // We accept anywhere from 4.0 to 4.9 seconds since the value is rounded down.
-    assert(SECONDS.convert(elapsedNs, NANOSECONDS) === 4)
+    val seconds = SECONDS.convert(elapsedNs, NANOSECONDS)
+    assert(seconds >= 4, s"Seconds value ($seconds) is less than 4.")
+    assert(seconds <= 30, s"Took more than 30 seconds ($seconds) to write data.")
     assert(underlying.toString("UTF-8") === data)
   }
 }
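The underlying lesson is that wall-clock assertions in tests should use a lower bound (the rate limiter must not finish early) and a generous upper bound (Thread.sleep and loaded CI machines can overshoot). A small, self-contained sketch of that pattern (helper names are illustrative, not Spark's test utilities):

    import java.util.concurrent.TimeUnit.{NANOSECONDS, SECONDS}

    object TimingAssertionSketch {
      // Illustrative timing helper: measure elapsed nanoseconds of a block.
      def benchmark[A](block: => A): Long = {
        val start = System.nanoTime()
        block
        System.nanoTime() - start
      }

      // Tolerant assertion: exact equality on elapsed seconds is flaky because
      // Thread.sleep() only guarantees a minimum delay, never an exact one.
      def assertTookBetween(minSec: Long, maxSec: Long)(block: => Unit): Unit = {
        val seconds = SECONDS.convert(benchmark(block), NANOSECONDS)
        assert(seconds >= minSec, s"finished too fast: ${seconds}s < ${minSec}s")
        assert(seconds <= maxSec, s"took too long: ${seconds}s > ${maxSec}s")
      }

      def main(args: Array[String]): Unit = {
        // Usage: a rate-limited write expected to take at least ~4s gets generous slack above,
        // e.g. assertTookBetween(4, 30) { stream.write(data.getBytes("UTF-8")) }
        assertTookBetween(1, 30) { Thread.sleep(1500) }
      }
    }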
git commit: [SQL] Rewrite join implementation to allow streaming of one relation.
Repository: spark
Updated Branches:
  refs/heads/master 841721e03 -> 5731af5be

[SQL] Rewrite join implementation to allow streaming of one relation.

Before we were materializing everything in memory. This also uses the projection interface so it will be easier to plug in code gen (it's ported from that branch).

@rxin @liancheng

Author: Michael Armbrust

Closes #250 from marmbrus/hashJoin and squashes the following commits:

1ad873e [Michael Armbrust] Change hasNext logic back to the correct version.
8e6f2a2 [Michael Armbrust] Review comments.
1e9fb63 [Michael Armbrust] style
bc0cb84 [Michael Armbrust] Rewrite join implementation to allow streaming of one relation.

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5731af5b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5731af5b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5731af5b

Branch: refs/heads/master
Commit: 5731af5be65ccac831445f351baf040a0d007687
Parents: 841721e
Author: Michael Armbrust
Authored: Mon Mar 31 15:23:46 2014 -0700
Committer: Reynold Xin
Committed: Mon Mar 31 15:23:46 2014 -0700

----------------------------------------------------------------------
 .../spark/sql/catalyst/expressions/Row.scala    |  10 ++
 .../sql/catalyst/expressions/predicates.scala   |   6 +
 .../scala/org/apache/spark/sql/SQLContext.scala |   2 +-
 .../spark/sql/execution/SparkStrategies.scala   |   6 +-
 .../org/apache/spark/sql/execution/joins.scala  | 127 ++-
 .../org/apache/spark/sql/hive/HiveContext.scala |   2 +-
 6 files changed, 116 insertions(+), 37 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/spark/blob/5731af5b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
index 31d42b9..6f939e6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
@@ -44,6 +44,16 @@ trait Row extends Seq[Any] with Serializable {
     s"[${this.mkString(",")}]"

   def copy(): Row
+
+  /** Returns true if there are any NULL values in this row. */
+  def anyNull: Boolean = {
+    var i = 0
+    while (i < length) {
+      if (isNullAt(i)) { return true }
+      i += 1
+    }
+    false
+  }
 }

 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/5731af5b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 722ff51..02fedd1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -21,6 +21,12 @@ import org.apache.spark.sql.catalyst.trees
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 import org.apache.spark.sql.catalyst.types.{BooleanType, StringType}

+object InterpretedPredicate {
+  def apply(expression: Expression): (Row => Boolean) = {
+    (r: Row) => expression.apply(r).asInstanceOf[Boolean]
+  }
+}
+
 trait Predicate extends Expression {
   self: Product =>

http://git-wip-us.apache.org/repos/asf/spark/blob/5731af5b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index cf3c06a..f950ea0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -117,7 +117,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
     val strategies: Seq[Strategy] =
       TopK ::
       PartialAggregation ::
-      SparkEquiInnerJoin ::
+      HashJoin ::
       ParquetOperations ::
       BasicOperators ::
       CartesianProduct ::

http://git-wip-us.apache.org/repos/asf/spark/blob/5731af5b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 86f9d3e..e35ac0b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sq
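The idea behind the rewrite, sketched here independently of Spark SQL's operators (types and names are simplified stand-ins, not the HashJoin physical operator itself): fully materialize only one "build" relation into a hash table keyed by the join keys, then stream the other relation once, probing the table row by row.

    object HashJoinSketch {
      type Row = Seq[Any]

      def hashJoin(
          buildSide: Iterator[Row],    // fully materialized into a hash table
          streamedSide: Iterator[Row], // consumed lazily, one row at a time
          buildKey: Row => Any,
          streamedKey: Row => Any): Iterator[Row] = {

        // Build phase: group the build-side rows by join key.
        val hashTable: Map[Any, Seq[Row]] = buildSide.toSeq.groupBy(buildKey)

        // Probe phase: stream the other relation and emit joined rows without
        // ever holding it in memory as a whole.
        streamedSide.flatMap { streamedRow =>
          hashTable.getOrElse(streamedKey(streamedRow), Seq.empty).map { buildRow =>
            streamedRow ++ buildRow
          }
        }
      }

      def main(args: Array[String]): Unit = {
        // Equi-join on the first column of each row.
        val joined = hashJoin(
          buildSide = Iterator(Seq(1, "a")),
          streamedSide = Iterator(Seq(1, "x"), Seq(2, "y")),
          buildKey = _.head,
          streamedKey = _.head)
        assert(joined.toSeq == Seq(Seq(1, "x", 1, "a")))
      }
    }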
git commit: SPARK-1352: Improve robustness of spark-submit script
Repository: spark
Updated Branches:
  refs/heads/master d66605367 -> 841721e03

SPARK-1352: Improve robustness of spark-submit script

1. Better error messages when required arguments are missing.
2. Support for unit testing cases where presented arguments are invalid.
3. Bug fix: Only use environment variables when they are set (otherwise will cause NPE).
4. A verbose mode to aid debugging.
5. Visibility of several variables is set to private.
6. Deprecation warning for existing scripts.

Author: Patrick Wendell

Closes #271 from pwendell/spark-submit and squashes the following commits:

9146def [Patrick Wendell] SPARK-1352: Improve robustness of spark-submit script

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/841721e0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/841721e0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/841721e0

Branch: refs/heads/master
Commit: 841721e03cc44ee7d8fe72c882db8c0f9f3af365
Parents: d666053
Author: Patrick Wendell
Authored: Mon Mar 31 12:07:14 2014 -0700
Committer: Patrick Wendell
Committed: Mon Mar 31 12:07:14 2014 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/deploy/Client.scala  |  3 +
 .../org/apache/spark/deploy/SparkSubmit.scala   | 67 +++---
 .../spark/deploy/SparkSubmitArguments.scala     | 74 ++--
 .../apache/spark/deploy/SparkSubmitSuite.scala  | 61 +++-
 .../org/apache/spark/deploy/yarn/Client.scala   |  3 +
 .../org/apache/spark/deploy/yarn/Client.scala   |  3 +
 6 files changed, 163 insertions(+), 48 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/spark/blob/841721e0/core/src/main/scala/org/apache/spark/deploy/Client.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index d9e3035..8fd2c7e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -128,6 +128,9 @@ private class ClientActor(driverArgs: ClientArguments, conf: SparkConf) extends
  */
 object Client {
   def main(args: Array[String]) {
+    println("WARNING: This client is deprecated and will be removed in a future version of Spark.")
+    println("Use ./bin/spark-submit with \"--master spark://host:port\"")
+
     val conf = new SparkConf()
     val driverArgs = new ClientArguments(args)

http://git-wip-us.apache.org/repos/asf/spark/blob/841721e0/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 24a9c98..1fa7991 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -17,7 +17,7 @@

 package org.apache.spark.deploy

-import java.io.File
+import java.io.{PrintStream, File}
 import java.net.URL

 import org.apache.spark.executor.ExecutorURLClassLoader
@@ -32,38 +32,51 @@ import scala.collection.mutable.Map
  * modes that Spark supports.
  */
 object SparkSubmit {
-  val YARN = 1
-  val STANDALONE = 2
-  val MESOS = 4
-  val LOCAL = 8
-  val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | LOCAL
+  private val YARN = 1
+  private val STANDALONE = 2
+  private val MESOS = 4
+  private val LOCAL = 8
+  private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | LOCAL

-  var clusterManager: Int = LOCAL
+  private var clusterManager: Int = LOCAL

   def main(args: Array[String]) {
     val appArgs = new SparkSubmitArguments(args)
+    if (appArgs.verbose) {
+      printStream.println(appArgs)
+    }
     val (childArgs, classpath, sysProps, mainClass) = createLaunchEnv(appArgs)
-    launch(childArgs, classpath, sysProps, mainClass)
+    launch(childArgs, classpath, sysProps, mainClass, appArgs.verbose)
   }

+  // Exposed for testing
+  private[spark] var printStream: PrintStream = System.err
+  private[spark] var exitFn: () => Unit = () => System.exit(-1)
+
+  private[spark] def printErrorAndExit(str: String) = {
+    printStream.println("error: " + str)
+    printStream.println("run with --help for more information or --verbose for debugging output")
+    exitFn()
+  }
+  private[spark] def printWarning(str: String) = printStream.println("warning: " + str)
+
   /**
    * @return
    *         a tuple containing the arguments for the child, a list of classpath
    *         entries for the child, and the main class for the child
    */
-  def createLaunchEnv(appArgs: SparkSubmitArguments): (ArrayBuffer[String],
+  private[spark] def createLaunchEnv(appArgs: SparkSubmitArguments): (
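The testing hooks introduced here ("Exposed for testing") follow a common pattern: keep System.err and System.exit behind overridable members so a unit test can capture error output and intercept exits instead of killing the JVM. A minimal, self-contained sketch of that pattern (CliTool and its members are hypothetical names, not the actual SparkSubmit code):

    import java.io.{ByteArrayOutputStream, PrintStream}

    // Illustrative CLI object with injectable side effects, in the spirit of
    // SparkSubmit's printStream/exitFn hooks but heavily simplified.
    object CliTool {
      var printStream: PrintStream = System.err      // replaced by tests
      var exitFn: () => Unit = () => System.exit(-1) // replaced by tests

      def printErrorAndExit(msg: String): Unit = {
        printStream.println("error: " + msg)
        exitFn()
      }

      def run(args: Array[String]): Unit = {
        if (args.isEmpty) printErrorAndExit("no arguments provided")
      }
    }

    // In a test: capture output and record the exit instead of terminating the JVM.
    object CliToolExitTest extends App {
      val captured = new ByteArrayOutputStream()
      var exited = false
      CliTool.printStream = new PrintStream(captured)
      CliTool.exitFn = () => { exited = true }

      CliTool.run(Array.empty)
      assert(exited, "expected the tool to signal an exit")
      assert(captured.toString.contains("no arguments provided"))
    }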