[4/8] spark git commit: Support cross building for Scala 2.11

2014-11-11 Thread pwendell
http://git-wip-us.apache.org/repos/asf/spark/blob/12f56334/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala
--
diff --git 
a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala 
b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala
new file mode 100644
index 000..1bb62c8
--- /dev/null
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -0,0 +1,1319 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import PartialFunction.cond
+import scala.language.implicitConversions
+import scala.beans.BeanProperty
+import scala.collection.mutable
+import scala.concurrent.{ Future, ExecutionContext }
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ ClassTag, classTag }
+import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
+import scala.tools.util.PathResolver
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, 
stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.Exceptional.unwrap
+import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, 
ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+
+/** An interpreter for Scala code.
+  *
+  *  The main public entry points are compile(), interpret(), and bind().
+  *  The compile() method loads a complete Scala file.  The interpret() method
+  *  executes one line of Scala code at the request of the user.  The bind()
+  *  method binds an object to a variable that can then be used by later
+  *  interpreted code.
+  *
+  *  The overall approach is based on compiling the requested code and then
+  *  using a Java classloader and Java reflection to run the code
+  *  and access its results.
+  *
+  *  In more detail, a single compiler instance is used
+  *  to accumulate all successfully compiled or interpreted Scala code.  To
+  *  "interpret" a line of code, the compiler generates a fresh object that
+  *  includes the line of code and which has public member(s) to export
+  *  all variables defined by that code.  To extract the result of an
+  *  interpreted line to show the user, a second "result object" is created
+  *  which imports the variables exported by the above object and then
+  *  exports members called "$eval" and "$print". To accommodate user 
expressions
+  *  that read from variables or methods defined in previous statements, 
"import"
+  *  statements are used.
+  *
+  *  This interpreter shares the strengths and weaknesses of using the
+  *  full compiler-to-Java.  The main strength is that interpreted code
+  *  behaves exactly as does compiled code, including running at full speed.
+  *  The main weakness is that redefining classes and methods is not handled
+  *  properly, because rebinding at the Java level is technically difficult.
+  *
+  *  @author Moez A. Abdel-Gawad
+  *  @author Lex Spoon
+  */
+class SparkIMain(@BeanProperty val factory: ScriptEngineFactory, 
initialSettings: Settings,
+  protected val out: JPrintWriter) extends AbstractScriptEngine with 
Compilable with SparkImports {
+  imain =>
+
+  setBindings(createBindings, ScriptContext.ENGINE_SCOPE)
+  object replOutput extends ReplOutput(settings.Yreploutdir) { }
+
+  @deprecated("Use replOutput.dir instead", "2.11.0")
+  def virtualDirectory = replOutput.dir
+  // Used in a test case.
+  def showDirectory() = replOutput.show(out)
+
+  private[nsc] var printResults   = true  // whether to print 
result lines
+  private[nsc] var totalSilence   = false // whether to print 
anything
+  private var _initializeComplete = false // compiler is 
initialized
+  private var _isInitialized: Future[Boolean] = null  // set up 
initialization future
+  private var bindExceptions  = true  // whether to bind 
the lastException variable
+  private var _executionWrapper   = ""// code to be 
wrapped around all lines
+
+  /** We're going to go to some trouble to initialize the compiler 
asynchronously.
+*  It's critical that nothing call into it until it's been initialized or 
we will
+*  run into unrecoverable issues, but the perceived repl startup time goes
+*  through the roof if we wait for it.  So we initialize it with a future 
and
+*  use a lazy val to ensure that any attempt to use the compiler object 
waits
+*  on the future.
+*/
+  private var _classLoader: util.AbstractFileClassLoader = null
  // active classloader
+  private val _compiler: ReplGlobal = newCompiler(settings, 
reporter)   // our private compiler
+
+  def compilerClasspath: Seq[java.net.URL] = (
+if 

[4/8] spark git commit: Support cross building for Scala 2.11

2014-11-11 Thread pwendell
http://git-wip-us.apache.org/repos/asf/spark/blob/daaca14c/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala
--
diff --git 
a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala 
b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala
new file mode 100644
index 000..1bb62c8
--- /dev/null
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -0,0 +1,1319 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import PartialFunction.cond
+import scala.language.implicitConversions
+import scala.beans.BeanProperty
+import scala.collection.mutable
+import scala.concurrent.{ Future, ExecutionContext }
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ ClassTag, classTag }
+import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
+import scala.tools.util.PathResolver
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, 
stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.Exceptional.unwrap
+import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, 
ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+
+/** An interpreter for Scala code.
+  *
+  *  The main public entry points are compile(), interpret(), and bind().
+  *  The compile() method loads a complete Scala file.  The interpret() method
+  *  executes one line of Scala code at the request of the user.  The bind()
+  *  method binds an object to a variable that can then be used by later
+  *  interpreted code.
+  *
+  *  The overall approach is based on compiling the requested code and then
+  *  using a Java classloader and Java reflection to run the code
+  *  and access its results.
+  *
+  *  In more detail, a single compiler instance is used
+  *  to accumulate all successfully compiled or interpreted Scala code.  To
+  *  "interpret" a line of code, the compiler generates a fresh object that
+  *  includes the line of code and which has public member(s) to export
+  *  all variables defined by that code.  To extract the result of an
+  *  interpreted line to show the user, a second "result object" is created
+  *  which imports the variables exported by the above object and then
+  *  exports members called "$eval" and "$print". To accommodate user 
expressions
+  *  that read from variables or methods defined in previous statements, 
"import"
+  *  statements are used.
+  *
+  *  This interpreter shares the strengths and weaknesses of using the
+  *  full compiler-to-Java.  The main strength is that interpreted code
+  *  behaves exactly as does compiled code, including running at full speed.
+  *  The main weakness is that redefining classes and methods is not handled
+  *  properly, because rebinding at the Java level is technically difficult.
+  *
+  *  @author Moez A. Abdel-Gawad
+  *  @author Lex Spoon
+  */
+class SparkIMain(@BeanProperty val factory: ScriptEngineFactory, 
initialSettings: Settings,
+  protected val out: JPrintWriter) extends AbstractScriptEngine with 
Compilable with SparkImports {
+  imain =>
+
+  setBindings(createBindings, ScriptContext.ENGINE_SCOPE)
+  object replOutput extends ReplOutput(settings.Yreploutdir) { }
+
+  @deprecated("Use replOutput.dir instead", "2.11.0")
+  def virtualDirectory = replOutput.dir
+  // Used in a test case.
+  def showDirectory() = replOutput.show(out)
+
+  private[nsc] var printResults   = true  // whether to print 
result lines
+  private[nsc] var totalSilence   = false // whether to print 
anything
+  private var _initializeComplete = false // compiler is 
initialized
+  private var _isInitialized: Future[Boolean] = null  // set up 
initialization future
+  private var bindExceptions  = true  // whether to bind 
the lastException variable
+  private var _executionWrapper   = ""// code to be 
wrapped around all lines
+
+  /** We're going to go to some trouble to initialize the compiler 
asynchronously.
+*  It's critical that nothing call into it until it's been initialized or 
we will
+*  run into unrecoverable issues, but the perceived repl startup time goes
+*  through the roof if we wait for it.  So we initialize it with a future 
and
+*  use a lazy val to ensure that any attempt to use the compiler object 
waits
+*  on the future.
+*/
+  private var _classLoader: util.AbstractFileClassLoader = null
  // active classloader
+  private val _compiler: ReplGlobal = newCompiler(settings, 
reporter)   // our private compiler
+
+  def compilerClasspath: Seq[java.net.URL] = (
+if