Repository: spark
Updated Branches:
  refs/heads/master 4793c8402 -> 3d3ecd774


[SPARK-5586][Spark Shell][SQL] Make `sqlContext` available in spark shell

After this change, shell startup looks like this:
```
15/02/05 13:41:22 INFO SparkILoop: Created spark context..
Spark context available as sc.
15/02/05 13:41:22 INFO SparkILoop: Created sql context..
SQLContext available as sqlContext.

scala> sq
sql          sqlContext   sqlParser    sqrt
```
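
With the context and the auto-imports in place, SQL can be issued directly from the prompt. A minimal sketch (not part of the original session output):

```
scala> sql("SELECT 1 + 1").collect()
res0: Array[org.apache.spark.sql.Row] = Array([2])
```

The bare `sql(...)` call works because `sqlContext.sql` is imported automatically on startup.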

Author: OopsOutOfMemory <victorshen...@126.com>

Closes #4387 from OopsOutOfMemory/sqlContextInShell and squashes the following commits:

c7f5203 [OopsOutOfMemory] auto-import sql() function
e160697 [OopsOutOfMemory] Merge branch 'sqlContextInShell' of https://github.com/OopsOutOfMemory/spark into sqlContextInShell
37c0a16 [OopsOutOfMemory] auto detect hive support
a9c59d9 [OopsOutOfMemory] rename and reduce range of imports
6b9e309 [OopsOutOfMemory] Merge branch 'master' into sqlContextInShell
cae652f [OopsOutOfMemory] make sqlContext available in spark shell


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3d3ecd77
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3d3ecd77
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3d3ecd77

Branch: refs/heads/master
Commit: 3d3ecd77411bfdd1c72be51616c46a6caf839be2
Parents: 4793c84
Author: OopsOutOfMemory <victorshen...@126.com>
Authored: Fri Feb 6 13:20:10 2015 -0800
Committer: Michael Armbrust <mich...@databricks.com>
Committed: Fri Feb 6 13:20:10 2015 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/repl/SparkILoop.scala | 19 +++++++++++++++++++
 .../org/apache/spark/repl/SparkILoopInit.scala   | 10 ++++++++++
 .../main/scala/org/apache/spark/repl/Main.scala  | 18 ++++++++++++++++++
 .../scala/org/apache/spark/repl/SparkILoop.scala | 12 +++++++++++-
 4 files changed, 58 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3d3ecd77/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 72c1a98..b4db3df 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -45,6 +45,7 @@ import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
 import org.apache.spark.Logging
 import org.apache.spark.SparkConf
 import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
 import org.apache.spark.util.Utils
 
 /** The Scala interactive shell.  It provides a read-eval-print loop
@@ -130,6 +131,7 @@ class SparkILoop(
   // NOTE: Must be public for visibility
   @DeveloperApi
   var sparkContext: SparkContext = _
+  var sqlContext: SQLContext = _
 
   override def echoCommandMessage(msg: String) {
     intp.reporter printMessage msg
@@ -1016,6 +1018,23 @@ class SparkILoop(
     sparkContext
   }
 
+  @DeveloperApi
+  def createSQLContext(): SQLContext = {
+    val name = "org.apache.spark.sql.hive.HiveContext"
+    val loader = Utils.getContextOrSparkClassLoader
+    try {
+      sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
+        .newInstance(sparkContext).asInstanceOf[SQLContext] 
+      logInfo("Created sql context (with Hive support)..")
+    }
+    catch {
+      case cnf: java.lang.ClassNotFoundException =>
+        sqlContext = new SQLContext(sparkContext)
+        logInfo("Created sql context..")
+    }
+    sqlContext
+  }
+
   private def getMaster(): String = {
     val master = this.master match {
       case Some(m) => m
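
The `createSQLContext` above distills to a reusable pattern: instantiate `HiveContext` reflectively so the repl module carries no compile-time dependency on the hive module, and fall back to a plain `SQLContext` when the Hive classes are absent. A self-contained sketch of the same idea (`makeSqlContext` is a hypothetical helper; the thread's context class loader stands in for `Utils.getContextOrSparkClassLoader`):

```
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

def makeSqlContext(sc: SparkContext): SQLContext = {
  val hiveCtxName = "org.apache.spark.sql.hive.HiveContext"
  try {
    // Look the class up by name so spark-hive is only a runtime dependency.
    Class.forName(hiveCtxName, true, Thread.currentThread.getContextClassLoader)
      .getConstructor(classOf[SparkContext])
      .newInstance(sc)
      .asInstanceOf[SQLContext]
  } catch {
    // Hive classes not on the classpath: degrade to the plain context.
    case _: ClassNotFoundException => new SQLContext(sc)
  }
}
```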

http://git-wip-us.apache.org/repos/asf/spark/blob/3d3ecd77/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 99bd777..0cf2de6 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -127,7 +127,17 @@ private[repl] trait SparkILoopInit {
            _sc
          }
         """)
+      command("""
+         @transient val sqlContext = {
+           val _sqlContext = org.apache.spark.repl.Main.interp.createSQLContext()
+           println("SQL context available as sqlContext.")
+           _sqlContext
+         }
+        """)
       command("import org.apache.spark.SparkContext._")
+      command("import sqlContext.implicits._")
+      command("import sqlContext.sql")
+      command("import org.apache.spark.sql.Dsl._")
     }
   }
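
The injected `command(...)` strings are evaluated inside the user's session, so the net effect is as if the user had typed the following at startup (a sketch consolidating the snippets above; comments are editorial):

```
@transient val sqlContext = {   // @transient keeps the context out of
  val _sqlContext =             // serialized closures that capture REPL state
    org.apache.spark.repl.Main.interp.createSQLContext()
  println("SQL context available as sqlContext.")
  _sqlContext
}
import sqlContext.implicits._      // implicit conversions for the new DataFrame API
import sqlContext.sql              // lets users call sql("...") without a receiver
import org.apache.spark.sql.Dsl._  // the column/expression DSL of this release
```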
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3d3ecd77/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 69e44d4..dc25692 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -19,6 +19,7 @@ package org.apache.spark.repl
 
 import org.apache.spark.util.Utils
 import org.apache.spark._
+import org.apache.spark.sql.SQLContext
 
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter.SparkILoop
@@ -34,6 +35,7 @@ object Main extends Logging {
     "-Yrepl-outdir", s"${outputDir.getAbsolutePath}", "-Yrepl-sync"), true)
   val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
   var sparkContext: SparkContext = _
+  var sqlContext: SQLContext = _
   var interp = new SparkILoop // this is a public var because tests reset it.
 
   def main(args: Array[String]) {
@@ -74,6 +76,22 @@ object Main extends Logging {
     sparkContext
   }
 
+  def createSQLContext(): SQLContext = {
+    val name = "org.apache.spark.sql.hive.HiveContext"
+    val loader = Utils.getContextOrSparkClassLoader
+    try {
+      sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
+        .newInstance(sparkContext).asInstanceOf[SQLContext] 
+      logInfo("Created sql context (with Hive support)..")
+    }
+    catch {
+      case cnf: java.lang.ClassNotFoundException =>
+        sqlContext = new SQLContext(sparkContext)
+        logInfo("Created sql context..")
+    }
+    sqlContext
+  }
+
   private def getMaster: String = {
     val master = {
       val envMaster = sys.env.get("MASTER")
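
In the 2.11 branch the factory lives on the `Main` object itself (the 2.10 REPL goes through `Main.interp.createSQLContext()` instead), so it can also be driven directly, e.g. from a test. A hypothetical sketch, assuming a local master:

```
import org.apache.spark.{SparkConf, SparkContext}

val conf = new SparkConf().setAppName("repl-sketch").setMaster("local")
Main.sparkContext = new SparkContext(conf)   // createSQLContext reads this var
val ctx = Main.createSQLContext()            // HiveContext if spark-hive is on the classpath
```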

http://git-wip-us.apache.org/repos/asf/spark/blob/3d3ecd77/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 2507273..1bd2a69 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -66,8 +66,18 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
            println("Spark context available as sc.")
            _sc
          }
-               """)
+        """)
+      command( """
+         @transient val sqlContext = {
+           val _sqlContext = org.apache.spark.repl.Main.createSQLContext()
+           println("SQL context available as sqlContext.")
+           _sqlContext
+         }
+        """)
       command("import org.apache.spark.SparkContext._")
+      command("import sqlContext.implicits._")
+      command("import sqlContext.sql")
+      command("import org.apache.spark.sql.Dsl._")
     }
   }
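
Because Hive support is resolved at runtime, the class of the resulting context tells you which path was taken. A quick check from the shell (a sketch, not from the original output):

```
scala> sqlContext.getClass.getName
res0: String = org.apache.spark.sql.hive.HiveContext
```

A build without the hive module would print `org.apache.spark.sql.SQLContext` instead.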
 

