Repository: spark
Updated Branches:
  refs/heads/master 8cba57a75 -> ed6f3f8a5


[SPARK-15072][SQL][REPL][EXAMPLES] Remove SparkSession.withHiveSupport

## What changes were proposed in this pull request?
Remove the `withHiveSupport` method of `SparkSession`; callers should use the builder's `enableHiveSupport` method instead.
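
For reference, a minimal sketch of the migration (assuming an existing `sparkConf`; this snippet is illustrative, not part of the patch itself):

```scala
// Before (removed by this patch):
//   val sc = new SparkContext(sparkConf)
//   val session = SparkSession.withHiveSupport(sc)

// After: construct the session through the builder and enable Hive support there.
val spark = SparkSession.builder
  .config(sparkConf)        // any existing SparkConf still applies
  .enableHiveSupport()      // replaces SparkSession.withHiveSupport(sc)
  .getOrCreate()

val sc = spark.sparkContext // the underlying SparkContext remains accessible
```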

## How was this patch tested?
Ran tests locally.

Author: Sandeep Singh <sand...@techaddict.me>

Closes #12851 from techaddict/SPARK-15072.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ed6f3f8a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ed6f3f8a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ed6f3f8a

Branch: refs/heads/master
Commit: ed6f3f8a5f3a6bf7c53e13c2798de398c9a526a6
Parents: 8cba57a
Author: Sandeep Singh <sand...@techaddict.me>
Authored: Thu May 5 14:35:15 2016 -0700
Committer: Andrew Or <and...@databricks.com>
Committed: Thu May 5 14:35:15 2016 -0700

----------------------------------------------------------------------
 .../spark/examples/sql/hive/HiveFromSpark.scala       | 14 +++++++++-----
 .../scala/org/apache/spark/sql/SparkSession.scala     | 13 -------------
 .../spark/sql/hive/thriftserver/SparkSQLEnv.scala     | 10 ++++++----
 .../apache/spark/sql/hive/HiveSparkSubmitSuite.scala  |  7 +++++--
 4 files changed, 20 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ed6f3f8a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
index ff33091..a15cf5d 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
@@ -36,15 +36,19 @@ object HiveFromSpark {
 
   def main(args: Array[String]) {
     val sparkConf = new SparkConf().setAppName("HiveFromSpark")
-    val sc = new SparkContext(sparkConf)
 
     // A hive context adds support for finding tables in the MetaStore and writing queries
     // using HiveQL. Users who do not have an existing Hive deployment can still create a
     // HiveContext. When not configured by the hive-site.xml, the context automatically
     // creates metastore_db and warehouse in the current directory.
-    val sparkSession = SparkSession.withHiveSupport(sc)
-    import sparkSession.implicits._
-    import sparkSession.sql
+    val spark = SparkSession.builder
+      .config(sparkConf)
+      .enableHiveSupport()
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    import spark.implicits._
+    import spark.sql
 
     sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)")
     sql(s"LOAD DATA LOCAL INPATH '${kv1File.getAbsolutePath}' INTO TABLE src")
@@ -74,7 +78,7 @@ object HiveFromSpark {
     println("Result of SELECT *:")
     sql("SELECT * FROM records r JOIN src s ON r.key = 
s.key").collect().foreach(println)
 
-    sc.stop()
+    spark.stop()
   }
 }
 // scalastyle:on println

http://git-wip-us.apache.org/repos/asf/spark/blob/ed6f3f8a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index aa7c335..9ed3756 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -816,17 +816,4 @@ object SparkSession {
     }
   }
 
-  /**
-   * Create a new [[SparkSession]] with a catalog backed by Hive.
-   */
-  def withHiveSupport(sc: SparkContext): SparkSession = {
-    if (hiveClassesArePresent) {
-      sc.conf.set(CATALOG_IMPLEMENTATION.key, "hive")
-      new SparkSession(sc)
-    } else {
-      throw new IllegalArgumentException(
-        "Unable to instantiate SparkSession with Hive support because Hive 
classes are not found.")
-    }
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/ed6f3f8a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index 665a44e..8de223f 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -54,13 +54,15 @@ private[hive] object SparkSQLEnv extends Logging {
           "spark.kryo.referenceTracking",
           maybeKryoReferenceTracking.getOrElse("false"))
 
-      sparkContext = new SparkContext(sparkConf)
-      sqlContext = SparkSession.withHiveSupport(sparkContext).wrapped
-      val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
+      val sparkSession = SparkSession.builder.config(sparkConf).enableHiveSupport().getOrCreate()
+      sparkContext = sparkSession.sparkContext
+      sqlContext = sparkSession.wrapped
+
+      val sessionState = sparkSession.sessionState.asInstanceOf[HiveSessionState]
       sessionState.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
       sessionState.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
       sessionState.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
-      sqlContext.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
+      sparkSession.conf.set("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/ed6f3f8a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 77a6a94..a320011 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -290,8 +290,11 @@ object SetWarehouseLocationTest extends Logging {
     conf.set("spark.sql.warehouse.dir", warehouseLocation.toString)
     conf.set("hive.metastore.warehouse.dir", hiveWarehouseLocation.toString)
 
-    val sc = new SparkContext(conf)
-    val sparkSession = SparkSession.withHiveSupport(sc)
+    val sparkSession = SparkSession.builder
+      .config(conf)
+      .enableHiveSupport()
+      .getOrCreate()
+
     val catalog = sparkSession.sessionState.catalog
 
     sparkSession.sql("drop table if exists testLocation")

