Repository: spark
Updated Branches:
  refs/heads/branch-2.0 f8804bb10 -> 114be703d


[SPARK-15072][SQL][PYSPARK] FollowUp: Remove SparkSession.withHiveSupport in 
PySpark

## What changes were proposed in this pull request?
This is a followup of https://github.com/apache/spark/pull/12851
Remove `SparkSession.withHiveSupport` in PySpark and instead use 
`SparkSession.builder.enableHiveSupport`

## How was this patch tested?
Existing tests.

Author: Sandeep Singh <[email protected]>

Closes #13063 from techaddict/SPARK-15072-followup.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/114be703
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/114be703
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/114be703

Branch: refs/heads/branch-2.0
Commit: 114be703d5655b6456955e795e670cd62915b37e
Parents: f8804bb
Author: Sandeep Singh <[email protected]>
Authored: Wed May 11 17:44:00 2016 -0700
Committer: Andrew Or <[email protected]>
Committed: Wed May 11 17:44:37 2016 -0700

----------------------------------------------------------------------
 .../sbt_app_hive/src/main/scala/HiveApp.scala             |  8 +++++---
 python/pyspark/shell.py                                   |  4 +++-
 python/pyspark/sql/session.py                             | 10 ----------
 .../scala/org/apache/spark/sql/hive/HiveContext.scala     |  2 +-
 4 files changed, 9 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/114be703/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala 
b/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala
index f69d46c..8cbfb9c 100644
--- a/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala
+++ b/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala
@@ -33,7 +33,9 @@ object SparkSqlExample {
       case None => new SparkConf().setAppName("Simple Sql App")
     }
     val sc = new SparkContext(conf)
-    val sparkSession = SparkSession.withHiveSupport(sc)
+    val sparkSession = SparkSession.builder
+      .enableHiveSupport()
+      .getOrCreate()
 
     import sparkSession._
     sql("DROP TABLE IF EXISTS src")
@@ -41,14 +43,14 @@ object SparkSqlExample {
     sql("LOAD DATA LOCAL INPATH 'data.txt' INTO TABLE src")
     val results = sql("FROM src SELECT key, value WHERE key >= 0 AND KEY < 
5").collect()
     results.foreach(println)
-    
+
     def test(f: => Boolean, failureMsg: String) = {
       if (!f) {
         println(failureMsg)
         System.exit(-1)
       }
     }
-    
+
     test(results.size == 5, "Unexpected number of selected elements: " + 
results)
     println("Test succeeded")
     sc.stop()

http://git-wip-us.apache.org/repos/asf/spark/blob/114be703/python/pyspark/shell.py
----------------------------------------------------------------------
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index c6b0eda..adaa3b5 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -41,7 +41,9 @@ atexit.register(lambda: sc.stop())
 try:
     # Try to access HiveConf, it will raise exception if Hive is not added
     sc._jvm.org.apache.hadoop.hive.conf.HiveConf()
-    spark = SparkSession.withHiveSupport(sc)
+    spark = SparkSession.builder\
+        .enableHiveSupport()\
+        .getOrCreate()
 except py4j.protocol.Py4JError:
     spark = SparkSession(sc)
 except TypeError:

http://git-wip-us.apache.org/repos/asf/spark/blob/114be703/python/pyspark/sql/session.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index 04842f6..4ee9ab8 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -182,16 +182,6 @@ class SparkSession(object):
         if SparkSession._instantiatedContext is None:
             SparkSession._instantiatedContext = self
 
-    @classmethod
-    @since(2.0)
-    def withHiveSupport(cls, sparkContext):
-        """Returns a new SparkSession with a catalog backed by Hive.
-
-        :param sparkContext: The underlying :class:`SparkContext`.
-        """
-        jsparkSession = 
sparkContext._jvm.SparkSession.withHiveSupport(sparkContext._jsc.sc())
-        return cls(sparkContext, jsparkSession)
-
     @since(2.0)
     def newSession(self):
         """

http://git-wip-us.apache.org/repos/asf/spark/blob/114be703/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git 
a/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
 
b/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index aa0485a..75166f6 100644
--- 
a/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ 
b/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.{SparkSession, SQLContext}
  * An instance of the Spark SQL execution engine that integrates with data 
stored in Hive.
  * Configuration for Hive is read from hive-site.xml on the classpath.
  */
-@deprecated("Use SparkSession.withHiveSupport instead", "2.0.0")
+@deprecated("Use SparkSession.builder.enableHiveSupport instead", "2.0.0")
 class HiveContext private[hive](
     _sparkSession: SparkSession,
     isRootContext: Boolean)


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to